From 581e45eb9ca61cad4eb996cb9a7a5f189d5c10a7 Mon Sep 17 00:00:00 2001 From: "Marcel S. Henselin" Date: Thu, 5 Feb 2026 09:55:32 +0000 Subject: [PATCH 01/41] feat: SQL server beta and templates refactoring (#32) ## Description relates to #1234 ## Checklist - [ ] Issue was linked above - [ ] Code format was applied: `make fmt` - [ ] Examples were added / adjusted (see `examples/` directory) - [x] Docs are up-to-date: `make generate-docs` (will be checked by CI) - [ ] Unit tests got implemented or updated - [ ] Acceptance tests got implemented or updated (see e.g. [here](https://github.com/stackitcloud/terraform-provider-stackit/blob/f5f99d170996b208672ae684b6da53420e369563/stackit/internal/services/dns/dns_acc_test.go)) - [x] Unit tests are passing: `make test` (will be checked by CI) - [x] No linter issues: `make lint` (will be checked by CI) Reviewed-on: https://tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pulls/32 Reviewed-by: Andre_Harms Co-authored-by: Marcel S. Henselin Co-committed-by: Marcel S. Henselin --- cmd/cmd/build/build.go | 63 +- .../templates/data_source_scaffold.gotmpl | 78 +- .../build/templates/functions_scaffold.gotmpl | 98 + .../build/templates/resource_scaffold.gotmpl | 390 +++- cmd/cmd/build/templates/util.gotmpl | 47 + cmd/cmd/build/templates/util_test.gotmpl | 97 + cmd/cmd/publish/architecture.go | 2 +- cmd/cmd/publish/templates/index.html.gompl | 11 + cmd/cmd/publish/templates/index.md.gompl | 34 + cmd/cmd/publish/templates/markdown.html.gompl | 79 + docs/data-sources/postgresflexalpha_flavor.md | 11 + .../data-sources/sqlserverflexalpha_flavor.md | 11 + .../sqlserverflexalpha_version.md | 35 - .../sqlserverflexbeta_database.md | 42 + docs/data-sources/sqlserverflexbeta_flavor.md | 47 + .../sqlserverflexbeta_instance.md | 77 + docs/index.md | 9 +- docs/resources/sqlserverflexbeta_database.md | 36 + docs/resources/sqlserverflexbeta_instance.md | 158 ++ .../data-source.tf | 8 + .../data-source.tf | 8 + .../data-source.tf | 5 + .../data-source.tf | 8 + .../data-source.tf | 4 + examples/provider/provider.tf | 10 +- .../resource.tf | 76 + go.mod | 21 + go.sum | 58 + sample/sqlserver/sqlserver.tf | 28 + ....yml.disabled => collation_config.yml.bak} | 2 +- .../sqlserverflex/alpha/database_config.yml | 16 +- ...sion_config.yml => version_config.yml.bak} | 2 +- .../beta/backup_config.yml.disabled | 13 + .../beta/collation_config.yml.disabled | 8 + .../sqlserverflex/beta/database_config.yml | 34 + .../sqlserverflex/beta/flavors_config.yml | 9 + .../sqlserverflex/beta/instance_config.yml | 28 + .../sqlserverflex/beta/user_config.yml | 24 + .../sqlserverflex/beta/version_config.yml.bak | 9 + .../sqlserverflexalpha/instance/datasource.go | 3 +- .../sqlserverflexalpha/instance/functions.go | 199 +- .../sqlserverflexalpha/instance/resource.go | 4 +- .../instance/resource_test.go | 690 +++--- .../services/sqlserverflexalpha/utils/util.go | 2 - .../sqlserverflexalpha/utils/util_test.go | 2 - .../sqlserverflexalpha/version/datasource.go | 71 - .../version_data_source_gen.go | 569 ----- .../sqlserverflexbeta/database/datasource.go | 150 ++ .../database_data_source_gen.go | 82 + .../databases_data_source_gen.go | 1180 ++++++++++ .../sqlserverflexbeta/database/resource.go | 426 ++++ .../resources_gen/database_resource_gen.go | 99 + .../sqlserverflexbeta/flavor/datasource.go | 335 +++ .../datasources_gen/flavor_data_source_gen.go | 1909 +++++++++++++++++ .../sqlserverflexbeta/flavor/functions.go | 65 + .../flavor/functions_test.go | 134 ++ .../sqlserverflexbeta/flavors/datasource.go | 118 + .../flavors_data_source_gen.go | 1909 +++++++++++++++++ .../sqlserverflexbeta/instance/datasource.go | 134 ++ .../instance_data_source_gen.go | 1579 ++++++++++++++ .../instances_data_source_gen.go | 1172 ++++++++++ .../sqlserverflexbeta/instance/functions.go | 306 +++ .../instance/planModifiers.yaml | 124 ++ .../sqlserverflexbeta/instance/resource.go | 521 +++++ .../resources_gen/instance_resource_gen.go | 1597 ++++++++++++++ .../sqlserverflexbeta/user/datasource.go | 118 + .../datasources_gen/user_data_source_gen.go | 1118 ++++++++++ .../sqlserverflexbeta/user/functions.go | 98 + .../sqlserverflexbeta/user/resource.go | 411 ++++ .../user/resources_gen/user_resource_gen.go | 111 + .../internal/wait/sqlserverflexbeta/wait.go | 159 ++ .../wait/sqlserverflexbeta/wait_test.go | 258 +++ stackit/provider.go | 15 +- tools/tools.go | 4 - 74 files changed, 15985 insertions(+), 1383 deletions(-) create mode 100644 cmd/cmd/build/templates/functions_scaffold.gotmpl create mode 100644 cmd/cmd/build/templates/util.gotmpl create mode 100644 cmd/cmd/build/templates/util_test.gotmpl create mode 100644 cmd/cmd/publish/templates/index.html.gompl create mode 100644 cmd/cmd/publish/templates/index.md.gompl create mode 100644 cmd/cmd/publish/templates/markdown.html.gompl delete mode 100644 docs/data-sources/sqlserverflexalpha_version.md create mode 100644 docs/data-sources/sqlserverflexbeta_database.md create mode 100644 docs/data-sources/sqlserverflexbeta_flavor.md create mode 100644 docs/data-sources/sqlserverflexbeta_instance.md create mode 100644 docs/resources/sqlserverflexbeta_database.md create mode 100644 docs/resources/sqlserverflexbeta_instance.md create mode 100644 examples/data-sources/stackitprivatepreview_postgresflexalpha_flavor/data-source.tf create mode 100644 examples/data-sources/stackitprivatepreview_sqlserverflexalpha_flavor/data-source.tf create mode 100644 examples/data-sources/stackitprivatepreview_sqlserverflexbeta_database/data-source.tf create mode 100644 examples/data-sources/stackitprivatepreview_sqlserverflexbeta_flavor/data-source.tf create mode 100644 examples/data-sources/stackitprivatepreview_sqlserverflexbeta_instance/data-source.tf create mode 100644 examples/resources/stackitprivatepreview_sqlserverflexbeta_instance/resource.tf rename service_specs/sqlserverflex/alpha/{collation_config.yml.disabled => collation_config.yml.bak} (92%) rename service_specs/sqlserverflex/alpha/{version_config.yml => version_config.yml.bak} (92%) create mode 100644 service_specs/sqlserverflex/beta/backup_config.yml.disabled create mode 100644 service_specs/sqlserverflex/beta/collation_config.yml.disabled create mode 100644 service_specs/sqlserverflex/beta/database_config.yml create mode 100644 service_specs/sqlserverflex/beta/flavors_config.yml create mode 100644 service_specs/sqlserverflex/beta/instance_config.yml create mode 100644 service_specs/sqlserverflex/beta/user_config.yml create mode 100644 service_specs/sqlserverflex/beta/version_config.yml.bak delete mode 100644 stackit/internal/services/sqlserverflexalpha/version/datasource.go delete mode 100644 stackit/internal/services/sqlserverflexalpha/version/datasources_gen/version_data_source_gen.go create mode 100644 stackit/internal/services/sqlserverflexbeta/database/datasource.go create mode 100644 stackit/internal/services/sqlserverflexbeta/database/datasources_gen/database_data_source_gen.go create mode 100644 stackit/internal/services/sqlserverflexbeta/database/datasources_gen/databases_data_source_gen.go create mode 100644 stackit/internal/services/sqlserverflexbeta/database/resource.go create mode 100644 stackit/internal/services/sqlserverflexbeta/database/resources_gen/database_resource_gen.go create mode 100644 stackit/internal/services/sqlserverflexbeta/flavor/datasource.go create mode 100644 stackit/internal/services/sqlserverflexbeta/flavor/datasources_gen/flavor_data_source_gen.go create mode 100644 stackit/internal/services/sqlserverflexbeta/flavor/functions.go create mode 100644 stackit/internal/services/sqlserverflexbeta/flavor/functions_test.go create mode 100644 stackit/internal/services/sqlserverflexbeta/flavors/datasource.go create mode 100644 stackit/internal/services/sqlserverflexbeta/flavors/datasources_gen/flavors_data_source_gen.go create mode 100644 stackit/internal/services/sqlserverflexbeta/instance/datasource.go create mode 100644 stackit/internal/services/sqlserverflexbeta/instance/datasources_gen/instance_data_source_gen.go create mode 100644 stackit/internal/services/sqlserverflexbeta/instance/datasources_gen/instances_data_source_gen.go create mode 100644 stackit/internal/services/sqlserverflexbeta/instance/functions.go create mode 100644 stackit/internal/services/sqlserverflexbeta/instance/planModifiers.yaml create mode 100644 stackit/internal/services/sqlserverflexbeta/instance/resource.go create mode 100644 stackit/internal/services/sqlserverflexbeta/instance/resources_gen/instance_resource_gen.go create mode 100644 stackit/internal/services/sqlserverflexbeta/user/datasource.go create mode 100644 stackit/internal/services/sqlserverflexbeta/user/datasources_gen/user_data_source_gen.go create mode 100644 stackit/internal/services/sqlserverflexbeta/user/functions.go create mode 100644 stackit/internal/services/sqlserverflexbeta/user/resource.go create mode 100644 stackit/internal/services/sqlserverflexbeta/user/resources_gen/user_resource_gen.go create mode 100644 stackit/internal/wait/sqlserverflexbeta/wait.go create mode 100644 stackit/internal/wait/sqlserverflexbeta/wait_test.go diff --git a/cmd/cmd/build/build.go b/cmd/cmd/build/build.go index df038609..3351d5da 100644 --- a/cmd/cmd/build/build.go +++ b/cmd/cmd/build/build.go @@ -254,10 +254,12 @@ func (b *Builder) Build() error { } type templateData struct { - PackageName string - NameCamel string - NamePascal string - NameSnake string + PackageName string + PackageNameCamel string + PackageNamePascal string + NameCamel string + NamePascal string + NameSnake string } func fileExists(path string) bool { @@ -310,7 +312,7 @@ func createBoilerplate(rootFolder, folder string) error { foundRes = fileExists(resGoFile) if handleDS && !foundDS { - slog.Info("Creating missing datasource.go", "service", svc.Name(), "resource", resourceName) + slog.Info(" creating missing datasource.go", "service", svc.Name(), "resource", resourceName) if !ValidateSnakeCase(resourceName) { return errors.New("resource name is invalid") } @@ -318,13 +320,15 @@ func createBoilerplate(rootFolder, folder string) error { tplName := "data_source_scaffold.gotmpl" err = writeTemplateToFile( tplName, - path.Join(rootFolder, "tools", "templates", tplName), + path.Join(rootFolder, "cmd", "cmd", "build", "templates", tplName), path.Join(folder, svc.Name(), res.Name(), "datasource.go"), &templateData{ - PackageName: svc.Name(), - NameCamel: ToCamelCase(resourceName), - NamePascal: ToPascalCase(resourceName), - NameSnake: resourceName, + PackageName: svc.Name(), + PackageNameCamel: ToCamelCase(svc.Name()), + PackageNamePascal: ToPascalCase(svc.Name()), + NameCamel: ToCamelCase(resourceName), + NamePascal: ToPascalCase(resourceName), + NameSnake: resourceName, }, ) if err != nil { @@ -333,7 +337,7 @@ func createBoilerplate(rootFolder, folder string) error { } if handleRes && !foundRes { - slog.Info("Creating missing resource.go", "service", svc.Name(), "resource", resourceName) + slog.Info(" creating missing resource.go", "service", svc.Name(), "resource", resourceName) if !ValidateSnakeCase(resourceName) { return errors.New("resource name is invalid") } @@ -341,18 +345,45 @@ func createBoilerplate(rootFolder, folder string) error { tplName := "resource_scaffold.gotmpl" err = writeTemplateToFile( tplName, - path.Join(rootFolder, "tools", "templates", tplName), + path.Join(rootFolder, "cmd", "cmd", "build", "templates", tplName), path.Join(folder, svc.Name(), res.Name(), "resource.go"), &templateData{ - PackageName: svc.Name(), - NameCamel: ToCamelCase(resourceName), - NamePascal: ToPascalCase(resourceName), - NameSnake: resourceName, + PackageName: svc.Name(), + PackageNameCamel: ToCamelCase(svc.Name()), + PackageNamePascal: ToPascalCase(svc.Name()), + NameCamel: ToCamelCase(resourceName), + NamePascal: ToPascalCase(resourceName), + NameSnake: resourceName, }, ) if err != nil { return err } + + if !fileExists(path.Join(folder, svc.Name(), res.Name(), "functions.go")) { + slog.Info(" creating missing functions.go", "service", svc.Name(), "resource", resourceName) + if !ValidateSnakeCase(resourceName) { + return errors.New("resource name is invalid") + } + fncTplName := "functions_scaffold.gotmpl" + err = writeTemplateToFile( + fncTplName, + path.Join(rootFolder, "cmd", "cmd", "build", "templates", fncTplName), + path.Join(folder, svc.Name(), res.Name(), "functions.go"), + &templateData{ + PackageName: svc.Name(), + PackageNameCamel: ToCamelCase(svc.Name()), + PackageNamePascal: ToPascalCase(svc.Name()), + NameCamel: ToCamelCase(resourceName), + NamePascal: ToPascalCase(resourceName), + NameSnake: resourceName, + }, + ) + if err != nil { + return err + } + + } } } } diff --git a/cmd/cmd/build/templates/data_source_scaffold.gotmpl b/cmd/cmd/build/templates/data_source_scaffold.gotmpl index d13021c7..74fc0f91 100644 --- a/cmd/cmd/build/templates/data_source_scaffold.gotmpl +++ b/cmd/cmd/build/templates/data_source_scaffold.gotmpl @@ -2,24 +2,32 @@ package {{.PackageName}} import ( "context" + "fmt" + "net/http" "github.com/hashicorp/terraform-plugin-framework/datasource" - "github.com/hashicorp/terraform-plugin-framework/datasource/schema" - "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/hashicorp/terraform-plugin-log/tflog" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" - "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg/{{.PackageName}}" + {{.PackageName}}Pkg "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/{{.PackageName}}" + + {{.PackageName}}Utils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/{{.PackageName}}/utils" {{.PackageName}}Gen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/{{.PackageName}}/{{.NameSnake}}/datasources_gen" ) var _ datasource.DataSource = (*{{.NameCamel}}DataSource)(nil) +const errorPrefix = "[{{.PackageNamePascal}} - {{.NamePascal}}]" + func New{{.NamePascal}}DataSource() datasource.DataSource { return &{{.NameCamel}}DataSource{} } type {{.NameCamel}}DataSource struct{ - client *{{.PackageName}}.APIClient + client *{{.PackageName}}Pkg.APIClient providerData core.ProviderData } @@ -31,8 +39,28 @@ func (d *{{.NameCamel}}DataSource) Schema(ctx context.Context, _ datasource.Sche resp.Schema = {{.PackageName}}Gen.{{.NamePascal}}DataSourceSchema(ctx) } +// Configure adds the provider configured client to the data source. +func (d *{{.NameCamel}}DataSource) Configure( + ctx context.Context, + req datasource.ConfigureRequest, + resp *datasource.ConfigureResponse, +) { + var ok bool + d.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics) + if !ok { + return + } + + apiClient := {{.PackageName}}Utils.ConfigureClient(ctx, &d.providerData, &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } + d.client = apiClient + tflog.Info(ctx, fmt.Sprintf("%s client configured", errorPrefix)) +} + func (d *{{.NameCamel}}DataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { - var data {{.PackageName}}Gen.{{.NameCamel}}Model + var data {{.PackageName}}Gen.{{.NamePascal}}Model // Read Terraform configuration data into the model resp.Diagnostics.Append(req.Config.Get(ctx, &data)...) @@ -41,11 +69,51 @@ func (d *{{.NameCamel}}DataSource) Read(ctx context.Context, req datasource.Read return } + ctx = core.InitProviderContext(ctx) + + projectId := data.ProjectId.ValueString() + region := d.providerData.GetRegionWithOverride(data.Region) + {{.NameCamel}}Id := data.{{.NamePascal}}Id.ValueString() + + ctx = tflog.SetField(ctx, "project_id", projectId) + ctx = tflog.SetField(ctx, "region", region) + ctx = tflog.SetField(ctx, "{{.NameCamel}}_id", {{.NameCamel}}Id) + + {{.NameCamel}}Resp, err := d.client.Get{{.NamePascal}}Request(ctx, projectId, region, {{.NameCamel}}Id).Execute() + if err != nil { + utils.LogError( + ctx, + &resp.Diagnostics, + err, + "Reading {{.NameCamel}}", + fmt.Sprintf("{{.NameCamel}} with ID %q does not exist in project %q.", {{.NameCamel}}Id, projectId), + map[int]string{ + http.StatusForbidden: fmt.Sprintf("Project with ID %q not found or forbidden access", projectId), + }, + ) + resp.State.RemoveResource(ctx) + return + } + + ctx = core.LogResponse(ctx) + + // Todo: Read API call logic // Example data value setting // data.Id = types.StringValue("example-id") + err = mapResponseToModel(ctx, {{.NameCamel}}Resp, &data, resp.Diagnostics) + if err != nil { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + fmt.Sprintf("%s Read", errorPrefix), + fmt.Sprintf("Processing API payload: %v", err), + ) + return + } + // Save data into Terraform state resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) } diff --git a/cmd/cmd/build/templates/functions_scaffold.gotmpl b/cmd/cmd/build/templates/functions_scaffold.gotmpl new file mode 100644 index 00000000..de4d2dbe --- /dev/null +++ b/cmd/cmd/build/templates/functions_scaffold.gotmpl @@ -0,0 +1,98 @@ +package {{.PackageName}} + +import ( + "context" + "fmt" + "math" + + "github.com/hashicorp/terraform-plugin-framework/attr" + "github.com/hashicorp/terraform-plugin-framework/diag" + "github.com/hashicorp/terraform-plugin-framework/resource" + "github.com/hashicorp/terraform-plugin-framework/types" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" + + {{.PackageName}} "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/{{.PackageName}}" + {{.PackageName}}ResGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/{{.PackageName}}/instance/resources_gen" +) + +func mapResponseToModel( + ctx context.Context, + resp *{{.PackageName}}.Get{{.NamePascal}}Response, + m *{{.PackageName}}ResGen.{{.NamePascal}}Model, + tfDiags diag.Diagnostics, +) error { + // TODO: complete and refactor + m.Id = types.StringValue(resp.GetId()) + + /* + sampleList, diags := types.ListValueFrom(ctx, types.StringType, resp.GetList()) + tfDiags.Append(diags...) + if diags.HasError() { + return fmt.Errorf( + "error converting list response value", + ) + } + sample, diags := {{.PackageName}}ResGen.NewSampleValue( + {{.PackageName}}ResGen.SampleValue{}.AttributeTypes(ctx), + map[string]attr.Value{ + "field": types.StringValue(string(resp.GetField())), + }, + ) + tfDiags.Append(diags...) + if diags.HasError() { + return fmt.Errorf( + "error converting sample response value", + "sample", + types.StringValue(string(resp.GetField())), + ) + } + m.Sample = sample + */ + return nil +} + +func handleEncryption( + m *{{.PackageName}}ResGen.{{.NamePascal}}Model, + resp *{{.PackageName}}.Get{{.NamePascal}}Response, +) {{.PackageName}}ResGen.EncryptionValue { + if !resp.HasEncryption() || + resp.Encryption == nil || + resp.Encryption.KekKeyId == nil || + resp.Encryption.KekKeyRingId == nil || + resp.Encryption.KekKeyVersion == nil || + resp.Encryption.ServiceAccount == nil { + + if m.Encryption.IsNull() || m.Encryption.IsUnknown() { + return {{.PackageName}}ResGen.NewEncryptionValueNull() + } + return m.Encryption + } + + enc := {{.PackageName}}ResGen.NewEncryptionValueNull() + if kVal, ok := resp.Encryption.GetKekKeyIdOk(); ok { + enc.KekKeyId = types.StringValue(kVal) + } + if kkVal, ok := resp.Encryption.GetKekKeyRingIdOk(); ok { + enc.KekKeyRingId = types.StringValue(kkVal) + } + if kkvVal, ok := resp.Encryption.GetKekKeyVersionOk(); ok { + enc.KekKeyVersion = types.StringValue(kkvVal) + } + if sa, ok := resp.Encryption.GetServiceAccountOk(); ok { + enc.ServiceAccount = types.StringValue(sa) + } + return enc +} + +func toCreatePayload( + ctx context.Context, + model *{{.PackageName}}ResGen.{{.NamePascal}}Model, +) (*{{.PackageName}}.Create{{.NamePascal}}RequestPayload, error) { + if model == nil { + return nil, fmt.Errorf("nil model") + } + + return &{{.PackageName}}.Create{{.NamePascal}}RequestPayload{ + // TODO: fill fields + }, nil +} diff --git a/cmd/cmd/build/templates/resource_scaffold.gotmpl b/cmd/cmd/build/templates/resource_scaffold.gotmpl index 5c96fdae..e497c8ad 100644 --- a/cmd/cmd/build/templates/resource_scaffold.gotmpl +++ b/cmd/cmd/build/templates/resource_scaffold.gotmpl @@ -2,15 +2,24 @@ package {{.PackageName}} import ( "context" + _ "embed" + "fmt" + "strings" - "github.com/hashicorp/terraform-plugin-framework/resource/identityschema" + "github.com/hashicorp/terraform-plugin-framework/path" "github.com/hashicorp/terraform-plugin-framework/resource" + "github.com/hashicorp/terraform-plugin-framework/resource/identityschema" "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/hashicorp/terraform-plugin-log/tflog" + "github.com/stackitcloud/stackit-sdk-go/core/config" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" - "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" - "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/{{.PackageName}}" - {{.PackageName}}Gen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/{{.PackageName}}/{{.NameSnake}}/resources_gen" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" + + {{.PackageName}}ResGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/{{.PackageName}}/{{.NameSnake}}/resources_gen" ) var ( @@ -30,10 +39,10 @@ type {{.NameCamel}}Resource struct{ providerData core.ProviderData } -type InstanceResourceIdentityModel struct { +type {{.NamePascal}}ResourceIdentityModel struct { ProjectID types.String `tfsdk:"project_id"` Region types.String `tfsdk:"region"` - InstanceID types.String `tfsdk:"instance_id"` + {{.NamePascal}}ID types.String `tfsdk:"instance_id"` // TODO: implement further needed parts } @@ -42,7 +51,20 @@ func (r *{{.NameCamel}}Resource) Metadata(ctx context.Context, req resource.Meta } func (r *{{.NameCamel}}Resource) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) { - resp.Schema = {{.PackageName}}Gen.{{.NamePascal}}ResourceSchema(ctx) + schema = {{.PackageName}}ResGen.{{.NamePascal}}ResourceSchema(ctx) + + fields, err := {{.PackageName}}Utils.ReadModifiersConfig(modifiersFileByte) + if err != nil { + resp.Diagnostics.AddError("error during read modifiers config file", err.Error()) + return + } + + err = {{.PackageName}}Utils.AddPlanModifiersToResourceSchema(fields, &schema) + if err != nil { + resp.Diagnostics.AddError("error adding plan modifiers", err.Error()) + return + } + resp.Schema = schema } func (r *instanceResource) IdentitySchema(_ context.Context, _ resource.IdentitySchemaRequest, resp *resource.IdentitySchemaResponse) { @@ -78,112 +100,26 @@ func (r *{{.NameCamel}}Resource) Configure( config.WithCustomAuth(r.providerData.RoundTripper), utils.UserAgentConfigOption(r.providerData.Version), } - if r.providerData.PostgresFlexCustomEndpoint != "" { - apiClientConfigOptions = append(apiClientConfigOptions, config.WithEndpoint(r.providerData.PostgresFlexCustomEndpoint)) + if r.providerData.{{.PackageNamePascal}}CustomEndpoint != "" { + apiClientConfigOptions = append(apiClientConfigOptions, config.WithEndpoint(r.providerData.{{.PackageName}}CustomEndpoint)) } else { apiClientConfigOptions = append(apiClientConfigOptions, config.WithRegion(r.providerData.GetRegion())) } apiClient, err := {{.PackageName}}.NewAPIClient(apiClientConfigOptions...) if err != nil { - resp.Diagnostics.AddError( "Error configuring API client", fmt.Sprintf("Configuring client: %v. This is an error related to the provider configuration, not to the resource configuration", err)) + resp.Diagnostics.AddError( + "Error configuring API client", + fmt.Sprintf( + "Configuring client: %v. This is an error related to the provider configuration, not to the resource configuration", + err, + ), + ) return } r.client = apiClient tflog.Info(ctx, "{{.PackageName}}.{{.NamePascal}} client configured") } -func (r *{{.NameCamel}}Resource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) { - var data {{.PackageName}}Gen.{{.NamePascal}}Model - - // Read Terraform plan data into the model - resp.Diagnostics.Append(req.Plan.Get(ctx, &data)...) - - if resp.Diagnostics.HasError() { - return - } - - // TODO: Create API call logic - - // Example data value setting - data.{{.NameCamel | ucfirst}}Id = types.StringValue("id-from-response") - - // TODO: Set data returned by API in identity - identity := InstanceResourceIdentityModel{ - ProjectID: types.StringValue(projectId), - Region: types.StringValue(region), - InstanceID: types.StringValue(instanceId), - } - resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...) - if resp.Diagnostics.HasError() { - return - } - - // TODO: implement wait handler if needed - - // Save data into Terraform state - resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) - - tflog.Info(ctx, "{{.PackageName}}.{{.NamePascal}} created") -} - -func (r *{{.NameCamel}}Resource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) { - var data {{.PackageName}}Gen.{{.NamePascal}}Model - - // Read Terraform prior state data into the model - resp.Diagnostics.Append(req.State.Get(ctx, &data)...) - - // Read identity data - var identityData InstanceResourceIdentityModel - resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...) - if resp.Diagnostics.HasError() { - return - } - - if resp.Diagnostics.HasError() { - return - } - - // Todo: Read API call logic - - // Save updated data into Terraform state - resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) - - tflog.Info(ctx, "{{.PackageName}}.{{.NamePascal}} read") -} - -func (r *{{.NameCamel}}Resource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) { - var data {{.PackageName}}Gen.{{.NamePascal}}Model - - // Read Terraform plan data into the model - resp.Diagnostics.Append(req.Plan.Get(ctx, &data)...) - - if resp.Diagnostics.HasError() { - return - } - - // Todo: Update API call logic - - // Save updated data into Terraform state - resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) - - tflog.Info(ctx, "{{.PackageName}}.{{.NamePascal}} updated") -} - -func (r *{{.NameCamel}}Resource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) { - var data {{.PackageName}}Gen.{{.NamePascal}}Model - - // Read Terraform prior state data into the model - resp.Diagnostics.Append(req.State.Get(ctx, &data)...) - - if resp.Diagnostics.HasError() { - return - } - - // Todo: Delete API call logic - - tflog.Info(ctx, "{{.PackageName}}.{{.NamePascal}} deleted") -} - // ModifyPlan implements resource.ResourceWithModifyPlan. // Use the modifier to set the effective region in the current plan. func (r *{{.NameCamel}}Resource) ModifyPlan( @@ -191,17 +127,21 @@ func (r *{{.NameCamel}}Resource) ModifyPlan( req resource.ModifyPlanRequest, resp *resource.ModifyPlanResponse, ) { // nolint:gocritic // function signature required by Terraform - var configModel {{.PackageName}}Gen.{{.NamePascal}}Model + // skip initial empty configuration to avoid follow-up errors if req.Config.Raw.IsNull() { return } + var configModel {{.PackageName}}ResGen.{{.NamePascal}}Model resp.Diagnostics.Append(req.Config.Get(ctx, &configModel)...) if resp.Diagnostics.HasError() { return } - var planModel {{.PackageName}}Gen.{{.NamePascal}}Model + if req.Plan.Raw.IsNull() { + return + } + var planModel {{.PackageName}}ResGen.{{.NamePascal}}Model resp.Diagnostics.Append(req.Plan.Get(ctx, &planModel)...) if resp.Diagnostics.HasError() { return @@ -212,12 +152,254 @@ func (r *{{.NameCamel}}Resource) ModifyPlan( return } + var identityModel {{.NamePascal}}ResourceIdentityModel + identityModel.ProjectID = planModel.ProjectId + identityModel.Region = planModel.Region + if !planModel.{{.NamePascal}}Id.IsNull() && !planModel.{{.NamePascal}}Id.IsUnknown() { + identityModel.{{.NamePascal}}ID = planModel.{{.NamePascal}}Id + } + + resp.Diagnostics.Append(resp.Identity.Set(ctx, identityModel)...) + if resp.Diagnostics.HasError() { + return + } + resp.Diagnostics.Append(resp.Plan.Set(ctx, planModel)...) if resp.Diagnostics.HasError() { return } } +//go:embed planModifiers.yaml +var modifiersFileByte []byte + +// Create creates a new resource +func (r *{{.NameCamel}}Resource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) { + var data {{.PackageName}}ResGen.{{.NamePascal}}Model + + // Read Terraform plan data into the model + resp.Diagnostics.Append(req.Plan.Get(ctx, &data)...) + + if resp.Diagnostics.HasError() { + return + } + + // Read identity data + var identityData {{.NamePascal}}ResourceIdentityModel + resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...) + if resp.Diagnostics.HasError() { + return + } + + ctx = core.InitProviderContext(ctx) + + projectId := identityData.ProjectID.ValueString() + region := identityData.Region.ValueString() + ctx = tflog.SetField(ctx, "project_id", projectId) + ctx = tflog.SetField(ctx, "region", region) + + // TODO: Create API call logic + /* + // Generate API request body from model + payload, err := toCreatePayload(ctx, &model) + if err != nil { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + "Error creating {{.NamePascal}}", + fmt.Sprintf("Creating API payload: %v", err), + ) + return + } + // Create new {{.NamePascal}} + createResp, err := r.client.Create{{.NamePascal}}Request( + ctx, + projectId, + region, + ).Create{{.NamePascal}}RequestPayload(*payload).Execute() + if err != nil { + core.LogAndAddError(ctx, &resp.Diagnostics, "Error creating {{.NamePascal}}", fmt.Sprintf("Calling API: %v", err)) + return + } + + ctx = core.LogResponse(ctx) + + {{.NamePascal}}Id := *createResp.Id + */ + + // Example data value setting + data.{{.NameCamel | ucfirst}}Id = types.StringValue("id-from-response") + + // TODO: Set data returned by API in identity + identity := {{.NamePascal}}ResourceIdentityModel{ + ProjectID: types.StringValue(projectId), + Region: types.StringValue(region), + // TODO: add missing values + {{.NamePascal}}ID: types.StringValue({{.NamePascal}}Id), + } + resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...) + if resp.Diagnostics.HasError() { + return + } + + // TODO: implement wait handler if needed + /* + + waitResp, err := wait.Create{{.NamePascal}}WaitHandler( + ctx, + r.client, + projectId, + {{.NamePascal}}Id, + region, + ).SetSleepBeforeWait( + 30 * time.Second, + ).SetTimeout( + 90 * time.Minute, + ).WaitWithContext(ctx) + if err != nil { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + "Error creating {{.NamePascal}}", + fmt.Sprintf("{{.NamePascal}} creation waiting: %v", err), + ) + return + } + + if waitResp.Id == nil { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + "Error creating {{.NamePascal}}", + "{{.NamePascal}} creation waiting: returned id is nil", + ) + return + } + + // Map response body to schema + err = mapResponseToModel(ctx, waitResp, &model, resp.Diagnostics) + if err != nil { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + "Error creating {{.NamePascal}}", + fmt.Sprintf("Processing API payload: %v", err), + ) + return + } + + */ + + // Save data into Terraform state + resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) + + tflog.Info(ctx, "{{.PackageName}}.{{.NamePascal}} created") +} + +func (r *{{.NameCamel}}Resource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) { + var data {{.PackageName}}ResGen.{{.NamePascal}}Model + + // Read Terraform prior state data into the model + resp.Diagnostics.Append(req.State.Get(ctx, &data)...) + if resp.Diagnostics.HasError() { + return + } + + // Read identity data + var identityData {{.NamePascal}}ResourceIdentityModel + resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...) + if resp.Diagnostics.HasError() { + return + } + + + ctx = core.InitProviderContext(ctx) + + projectId := identityData.ProjectID.ValueString() + region := identityData.Region.ValueString() + ctx = tflog.SetField(ctx, "project_id", projectId) + ctx = tflog.SetField(ctx, "region", region) + + // Todo: Read API call logic + + // Save updated data into Terraform state + resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) + + // TODO: Set data returned by API in identity + identity := {{.NamePascal}}ResourceIdentityModel{ + ProjectID: types.StringValue(projectId), + Region: types.StringValue(region), + // InstanceID: types.StringValue(instanceId), + } + resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...) + if resp.Diagnostics.HasError() { + return + } + + tflog.Info(ctx, "{{.PackageName}}.{{.NamePascal}} read") +} + +func (r *{{.NameCamel}}Resource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) { + var data {{.PackageName}}ResGen.{{.NamePascal}}Model + + // Read Terraform prior state data into the model + resp.Diagnostics.Append(req.State.Get(ctx, &data)...) + if resp.Diagnostics.HasError() { + return + } + + // Read identity data + var identityData {{.NamePascal}}ResourceIdentityModel + resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...) + if resp.Diagnostics.HasError() { + return + } + + + ctx = core.InitProviderContext(ctx) + + projectId := identityData.ProjectID.ValueString() + region := identityData.Region.ValueString() + ctx = tflog.SetField(ctx, "project_id", projectId) + ctx = tflog.SetField(ctx, "region", region) + + // Todo: Update API call logic + + // Save updated data into Terraform state + resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) + + tflog.Info(ctx, "{{.PackageName}}.{{.NamePascal}} updated") +} + +func (r *{{.NameCamel}}Resource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) { + var data {{.PackageName}}ResGen.{{.NamePascal}}Model + + // Read Terraform prior state data into the model + resp.Diagnostics.Append(req.State.Get(ctx, &data)...) + if resp.Diagnostics.HasError() { + return + } + + // Read identity data + var identityData {{.NamePascal}}ResourceIdentityModel + resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...) + if resp.Diagnostics.HasError() { + return + } + + + ctx = core.InitProviderContext(ctx) + + projectId := identityData.ProjectID.ValueString() + region := identityData.Region.ValueString() + ctx = tflog.SetField(ctx, "project_id", projectId) + ctx = tflog.SetField(ctx, "region", region) + + // Todo: Delete API call logic + + tflog.Info(ctx, "{{.PackageName}}.{{.NamePascal}} deleted") +} + // ImportState imports a resource into the Terraform state on success. // The expected format of the resource import identifier is: project_id,zone_id,record_set_id func (r *{{.NameCamel}}Resource) ImportState( diff --git a/cmd/cmd/build/templates/util.gotmpl b/cmd/cmd/build/templates/util.gotmpl new file mode 100644 index 00000000..cecc8e9e --- /dev/null +++ b/cmd/cmd/build/templates/util.gotmpl @@ -0,0 +1,47 @@ +package utils + +import ( + "context" + "fmt" + + {{.PackageName}} "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/{{.PackageName}}" + + "github.com/hashicorp/terraform-plugin-framework/diag" + "github.com/stackitcloud/stackit-sdk-go/core/config" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" +) + +func ConfigureClient( + ctx context.Context, + providerData *core.ProviderData, + diags *diag.Diagnostics, +) *{{.PackageName}}.APIClient { + apiClientConfigOptions := []config.ConfigurationOption{ + config.WithCustomAuth(providerData.RoundTripper), + utils.UserAgentConfigOption(providerData.Version), + } + if providerData.{{.PackageName}}CustomEndpoint != "" { + apiClientConfigOptions = append( + apiClientConfigOptions, + config.WithEndpoint(providerData.{{.PackageName}}CustomEndpoint), + ) + } else { + apiClientConfigOptions = append(apiClientConfigOptions, config.WithRegion(providerData.GetRegion())) + } + apiClient, err := {{.PackageName}}.NewAPIClient(apiClientConfigOptions...) + if err != nil { + core.LogAndAddError( + ctx, + diags, + "Error configuring API client", + fmt.Sprintf( + "Configuring client: %v. This is an error related to the provider configuration, not to the resource configuration", + err, + ), + ) + return nil + } + + return apiClient +} diff --git a/cmd/cmd/build/templates/util_test.gotmpl b/cmd/cmd/build/templates/util_test.gotmpl new file mode 100644 index 00000000..567f2623 --- /dev/null +++ b/cmd/cmd/build/templates/util_test.gotmpl @@ -0,0 +1,97 @@ +package utils + +import ( + "context" + "os" + "reflect" + "testing" + + "github.com/hashicorp/terraform-plugin-framework/diag" + sdkClients "github.com/stackitcloud/stackit-sdk-go/core/clients" + "github.com/stackitcloud/stackit-sdk-go/core/config" + {{.PackageName}} "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/{{.PackageName}}" + + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" +) + +const ( + testVersion = "1.2.3" + testCustomEndpoint = "https://sqlserverflex-custom-endpoint.api.stackit.cloud" +) + +func TestConfigureClient(t *testing.T) { + /* mock authentication by setting service account token env variable */ + os.Clearenv() + err := os.Setenv(sdkClients.ServiceAccountToken, "mock-val") + if err != nil { + t.Errorf("error setting env variable: %v", err) + } + + type args struct { + providerData *core.ProviderData + } + tests := []struct { + name string + args args + wantErr bool + expected *sqlserverflex.APIClient + }{ + { + name: "default endpoint", + args: args{ + providerData: &core.ProviderData{ + Version: testVersion, + }, + }, + expected: func() *sqlserverflex.APIClient { + apiClient, err := sqlserverflex.NewAPIClient( + config.WithRegion("eu01"), + utils.UserAgentConfigOption(testVersion), + ) + if err != nil { + t.Errorf("error configuring client: %v", err) + } + return apiClient + }(), + wantErr: false, + }, + { + name: "custom endpoint", + args: args{ + providerData: &core.ProviderData{ + Version: testVersion, + SQLServerFlexCustomEndpoint: testCustomEndpoint, + }, + }, + expected: func() *sqlserverflex.APIClient { + apiClient, err := sqlserverflex.NewAPIClient( + utils.UserAgentConfigOption(testVersion), + config.WithEndpoint(testCustomEndpoint), + ) + if err != nil { + t.Errorf("error configuring client: %v", err) + } + return apiClient + }(), + wantErr: false, + }, + } + for _, tt := range tests { + t.Run( + tt.name, func(t *testing.T) { + ctx := context.Background() + diags := diag.Diagnostics{} + + actual := ConfigureClient(ctx, tt.args.providerData, &diags) + if diags.HasError() != tt.wantErr { + t.Errorf("ConfigureClient() error = %v, want %v", diags.HasError(), tt.wantErr) + } + + if !reflect.DeepEqual(actual, tt.expected) { + t.Errorf("ConfigureClient() = %v, want %v", actual, tt.expected) + } + }, + ) + } +} diff --git a/cmd/cmd/publish/architecture.go b/cmd/cmd/publish/architecture.go index a2e6f6af..f77188c3 100644 --- a/cmd/cmd/publish/architecture.go +++ b/cmd/cmd/publish/architecture.go @@ -94,7 +94,7 @@ func (p *Provider) CreateArchitectureFiles() error { archFileName := path.Join(downloadPathPrefix, target, arch) a := Architecture{ - Protocols: []string{"5.1"}, + Protocols: []string{"5.1", "6.0"}, OS: target, Arch: arch, FileName: sum.Path, diff --git a/cmd/cmd/publish/templates/index.html.gompl b/cmd/cmd/publish/templates/index.html.gompl new file mode 100644 index 00000000..531032fe --- /dev/null +++ b/cmd/cmd/publish/templates/index.html.gompl @@ -0,0 +1,11 @@ + + + + Forwarding | Weiterleitung + + + +Falls Sie nicht automatisch weitergeleitet werden, klicken Sie bitte hier.
+Sie gelangen dann auf unsere Hauptseite + + diff --git a/cmd/cmd/publish/templates/index.md.gompl b/cmd/cmd/publish/templates/index.md.gompl new file mode 100644 index 00000000..3ebaa0e1 --- /dev/null +++ b/cmd/cmd/publish/templates/index.md.gompl @@ -0,0 +1,34 @@ +--- +page_title: STACKIT provider PrivatePreview +description: none +--- + +# provider +[Provider](docs/index.md) + +## PostGreSQL alpha +### data sources + +- [Flavor](docs/data-sources/postgresflexalpha_flavor.md) +- [Database](docs/data-sources/postgresflexalpha_database.md) +- [Instance](docs/data-sources/postgresflexalpha_instance.md) +- [Flavors](docs/data-sources/postgresflexalpha_flavors.md) +- [User](docs/data-sources/postgresflexalpha_user.md) + +### resources +- [Database](docs/resources/postgresflexalpha_database.md) +- [Instance](docs/resources/postgresflexalpha_instance.md) +- [User](docs/resources/postgresflexalpha_user.md) + +## SQL Server alpha +### data sources +- [Database](docs/data-sources/sqlserverflexalpha_database.md) +- [Version](docs/data-sources/sqlserverflexalpha_version.md) +- [User](docs/data-sources/sqlserverflexalpha_user.md) +- [Flavor](docs/data-sources/sqlserverflexalpha_flavor.md) +- [Instance](docs/data-sources/sqlserverflexalpha_instance.md) + +### resources +- [Database](docs/resources/sqlserverflexalpha_database.md) +- [User](docs/resources/sqlserverflexalpha_user.md) +- [Instance](docs/resources/sqlserverflexalpha_instance.md) diff --git a/cmd/cmd/publish/templates/markdown.html.gompl b/cmd/cmd/publish/templates/markdown.html.gompl new file mode 100644 index 00000000..d338b241 --- /dev/null +++ b/cmd/cmd/publish/templates/markdown.html.gompl @@ -0,0 +1,79 @@ + +{{ $mdFile := .OriginalReq.URL.Path | trimPrefix "/docs" }} +{{ $md := (include $mdFile | splitFrontMatter) }} + + + {{$md.Meta.page_title}} + + + + +

{{$md.Meta.page_title}}

+
+
+ + +
+ +
+
+
+
+
+
+
+ {{markdown $md.Body}} +
+
+
+
+
+
+
+
+ + diff --git a/docs/data-sources/postgresflexalpha_flavor.md b/docs/data-sources/postgresflexalpha_flavor.md index 4d28ffc3..24c79829 100644 --- a/docs/data-sources/postgresflexalpha_flavor.md +++ b/docs/data-sources/postgresflexalpha_flavor.md @@ -10,7 +10,18 @@ description: |- +## Example Usage +```terraform +data "stackitprivatepreview_postgresflexalpha_flavor" "flavor" { + project_id = var.project_id + region = var.region + cpu = 4 + ram = 16 + node_type = "Single" + storage_class = "premium-perf2-stackit" +} +``` ## Schema diff --git a/docs/data-sources/sqlserverflexalpha_flavor.md b/docs/data-sources/sqlserverflexalpha_flavor.md index 426a0605..0dfc1fd2 100644 --- a/docs/data-sources/sqlserverflexalpha_flavor.md +++ b/docs/data-sources/sqlserverflexalpha_flavor.md @@ -10,7 +10,18 @@ description: |- +## Example Usage +```terraform +data "stackitprivatepreview_sqlserverflexalpha_flavor" "flavor" { + project_id = var.project_id + region = var.region + cpu = 4 + ram = 16 + node_type = "Single" + storage_class = "premium-perf2-stackit" +} +``` ## Schema diff --git a/docs/data-sources/sqlserverflexalpha_version.md b/docs/data-sources/sqlserverflexalpha_version.md deleted file mode 100644 index c9c61732..00000000 --- a/docs/data-sources/sqlserverflexalpha_version.md +++ /dev/null @@ -1,35 +0,0 @@ ---- -# generated by https://github.com/hashicorp/terraform-plugin-docs -page_title: "stackitprivatepreview_sqlserverflexalpha_version Data Source - stackitprivatepreview" -subcategory: "" -description: |- - ---- - -# stackitprivatepreview_sqlserverflexalpha_version (Data Source) - - - - - - -## Schema - -### Required - -- `project_id` (String) The STACKIT project ID. -- `region` (String) The region which should be addressed - -### Read-Only - -- `versions` (Attributes List) A list containing available sqlserver versions. (see [below for nested schema](#nestedatt--versions)) - - -### Nested Schema for `versions` - -Read-Only: - -- `beta` (Boolean) Flag if the version is a beta version. If set the version may contain bugs and is not fully tested. -- `deprecated` (String) Timestamp in RFC3339 format which says when the version will no longer be supported by STACKIT. -- `recommend` (Boolean) Flag if the version is recommend by the STACKIT Team. -- `version` (String) The sqlserver version used for the instance. diff --git a/docs/data-sources/sqlserverflexbeta_database.md b/docs/data-sources/sqlserverflexbeta_database.md new file mode 100644 index 00000000..98a29d9f --- /dev/null +++ b/docs/data-sources/sqlserverflexbeta_database.md @@ -0,0 +1,42 @@ +--- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "stackitprivatepreview_sqlserverflexbeta_database Data Source - stackitprivatepreview" +subcategory: "" +description: |- + +--- + +# stackitprivatepreview_sqlserverflexbeta_database (Data Source) + + + +## Example Usage + +```terraform +data "stackitprivatepreview_sqlserverflexbeta_database" "example" { + project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" + instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" + database_name = "dbname" +} +``` + + +## Schema + +### Required + +- `database_name` (String) The name of the database. +- `instance_id` (String) The ID of the instance. +- `project_id` (String) The STACKIT project ID. + +### Optional + +- `region` (String) The region which should be addressed + +### Read-Only + +- `collation_name` (String) The collation of the database. This database collation should match the *collation_name* of one of the collations given by the **Get database collation list** endpoint. +- `compatibility_level` (Number) CompatibilityLevel of the Database. +- `id` (Number) The id of the database. +- `name` (String) The name of the database. +- `owner` (String) The owner of the database. diff --git a/docs/data-sources/sqlserverflexbeta_flavor.md b/docs/data-sources/sqlserverflexbeta_flavor.md new file mode 100644 index 00000000..6c1569be --- /dev/null +++ b/docs/data-sources/sqlserverflexbeta_flavor.md @@ -0,0 +1,47 @@ +--- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "stackitprivatepreview_sqlserverflexbeta_flavor Data Source - stackitprivatepreview" +subcategory: "" +description: |- + +--- + +# stackitprivatepreview_sqlserverflexbeta_flavor (Data Source) + + + +## Example Usage + +```terraform +data "stackitprivatepreview_sqlserverflexbeta_flavor" "flavor" { + project_id = var.project_id + region = var.region + cpu = 4 + ram = 16 + node_type = "Single" + storage_class = "premium-perf2-stackit" +} +``` + + +## Schema + +### Read-Only + +- `cpu` (Number) The cpu count of the instance. +- `description` (String) The flavor description. +- `id` (String) The id of the instance flavor. +- `max_gb` (Number) maximum storage which can be ordered for the flavor in Gigabyte. +- `memory` (Number) The memory of the instance in Gibibyte. +- `min_gb` (Number) minimum storage which is required to order in Gigabyte. +- `node_type` (String) defines the nodeType it can be either single or HA +- `storage_classes` (Attributes List) maximum storage which can be ordered for the flavor in Gigabyte. (see [below for nested schema](#nestedatt--storage_classes)) + + +### Nested Schema for `storage_classes` + +Read-Only: + +- `class` (String) +- `max_io_per_sec` (Number) +- `max_through_in_mb` (Number) diff --git a/docs/data-sources/sqlserverflexbeta_instance.md b/docs/data-sources/sqlserverflexbeta_instance.md new file mode 100644 index 00000000..cc3645ef --- /dev/null +++ b/docs/data-sources/sqlserverflexbeta_instance.md @@ -0,0 +1,77 @@ +--- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "stackitprivatepreview_sqlserverflexbeta_instance Data Source - stackitprivatepreview" +subcategory: "" +description: |- + +--- + +# stackitprivatepreview_sqlserverflexbeta_instance (Data Source) + + + +## Example Usage + +```terraform +data "stackitprivatepreview_sqlserverflexbeta_instance" "example" { + project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" + instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" +} +``` + + +## Schema + +### Required + +- `instance_id` (String) The ID of the instance. +- `project_id` (String) The STACKIT project ID. +- `region` (String) The region which should be addressed + +### Read-Only + +- `backup_schedule` (String) The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule. +- `edition` (String) Edition of the MSSQL server instance +- `encryption` (Attributes) this defines which key to use for storage encryption (see [below for nested schema](#nestedatt--encryption)) +- `flavor_id` (String) The id of the instance flavor. +- `id` (String) The ID of the instance. +- `is_deletable` (Boolean) Whether the instance can be deleted or not. +- `name` (String) The name of the instance. +- `network` (Attributes) The access configuration of the instance (see [below for nested schema](#nestedatt--network)) +- `replicas` (Number) How many replicas the instance should have. +- `retention_days` (Number) The days for how long the backup files should be stored before cleaned up. 30 to 365 +- `status` (String) +- `storage` (Attributes) The object containing information about the storage size and class. (see [below for nested schema](#nestedatt--storage)) +- `version` (String) The sqlserver version used for the instance. + + +### Nested Schema for `encryption` + +Read-Only: + +- `kek_key_id` (String) The key identifier +- `kek_key_ring_id` (String) The keyring identifier +- `kek_key_version` (String) The key version +- `service_account` (String) + + + +### Nested Schema for `network` + +Read-Only: + +- `access_scope` (String) The network access scope of the instance + +⚠️ **Note:** This feature is in private preview. Supplying this object is only permitted for enabled accounts. If your account does not have access, the request will be rejected. +- `acl` (List of String) List of IPV4 cidr. +- `instance_address` (String) +- `router_address` (String) + + + +### Nested Schema for `storage` + +Read-Only: + +- `class` (String) The storage class for the storage. +- `size` (Number) The storage size in Gigabytes. diff --git a/docs/index.md b/docs/index.md index 4f1e52cd..84bc25b3 100644 --- a/docs/index.md +++ b/docs/index.md @@ -16,14 +16,13 @@ provider "stackitprivatepreview" { default_region = "eu01" } -# Authentication - -# Token flow (scheduled for deprecation and will be removed on December 17, 2025) provider "stackitprivatepreview" { - default_region = "eu01" - service_account_token = var.service_account_token + default_region = "eu01" + service_account_key_path = "service_account.json" } +# Authentication + # Key flow provider "stackitprivatepreview" { default_region = "eu01" diff --git a/docs/resources/sqlserverflexbeta_database.md b/docs/resources/sqlserverflexbeta_database.md new file mode 100644 index 00000000..893433fe --- /dev/null +++ b/docs/resources/sqlserverflexbeta_database.md @@ -0,0 +1,36 @@ +--- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "stackitprivatepreview_sqlserverflexbeta_database Resource - stackitprivatepreview" +subcategory: "" +description: |- + +--- + +# stackitprivatepreview_sqlserverflexbeta_database (Resource) + + + + + + +## Schema + +### Required + +- `name` (String) The name of the database. +- `owner` (String) The owner of the database. + +### Optional + +- `collation` (String) The collation of the database. This database collation should match the *collation_name* of one of the collations given by the **Get database collation list** endpoint. +- `compatibility` (Number) CompatibilityLevel of the Database. +- `database_name` (String) The name of the database. +- `instance_id` (String) The ID of the instance. +- `project_id` (String) The STACKIT project ID. +- `region` (String) The region which should be addressed + +### Read-Only + +- `collation_name` (String) The collation of the database. This database collation should match the *collation_name* of one of the collations given by the **Get database collation list** endpoint. +- `compatibility_level` (Number) CompatibilityLevel of the Database. +- `id` (Number) The id of the database. diff --git a/docs/resources/sqlserverflexbeta_instance.md b/docs/resources/sqlserverflexbeta_instance.md new file mode 100644 index 00000000..20f5a9bc --- /dev/null +++ b/docs/resources/sqlserverflexbeta_instance.md @@ -0,0 +1,158 @@ +--- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "stackitprivatepreview_sqlserverflexbeta_instance Resource - stackitprivatepreview" +subcategory: "" +description: |- + +--- + +# stackitprivatepreview_sqlserverflexbeta_instance (Resource) + + + +## Example Usage + +```terraform +# without encryption and SNA +resource "stackitprivatepreview_sqlserverflexbeta_instance" "instance" { + project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" + name = "example-instance" + backup_schedule = "0 3 * * *" + retention_days = 31 + flavor_id = "flavor_id" + storage = { + class = "premium-perf2-stackit" + size = 50 + } + version = 2022 + network = { + acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"] + access_scope = "SNA" + } +} + +# without encryption and PUBLIC +resource "stackitprivatepreview_sqlserverflexbeta_instance" "instance" { + project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" + name = "example-instance" + backup_schedule = "0 3 * * *" + retention_days = 31 + flavor_id = "flavor_id" + storage = { + class = "premium-perf2-stackit" + size = 50 + } + version = 2022 + network = { + acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"] + access_scope = "PUBLIC" + } +} + +# with encryption and SNA +resource "stackitprivatepreview_sqlserverflexbeta_instance" "instance" { + project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" + name = "example-instance" + backup_schedule = "0 3 * * *" + retention_days = 31 + flavor_id = "flavor_id" + storage = { + class = "premium-perf2-stackit" + size = 50 + } + version = 2022 + encryption = { + kek_key_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" + kek_key_ring_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" + kek_key_version = 1 + service_account = "service_account@email" + } + network = { + acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"] + access_scope = "SNA" + } +} + + +# Only use the import statement, if you want to import an existing sqlserverflex instance +import { + to = stackitprivatepreview_sqlserverflexalpha_instance.import-example + id = "${var.project_id},${var.region},${var.sql_instance_id}" +} + +# import with identity +import { + to = stackitprivatepreview_sqlserverflexalpha_instance.import-example + identity = { + project_id = var.project_id + region = var.region + instance_id = var.sql_instance_id + } +} +``` + + +## Schema + +### Required + +- `backup_schedule` (String) The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule. +- `flavor_id` (String) The id of the instance flavor. +- `name` (String) The name of the instance. +- `network` (Attributes) the network configuration of the instance. (see [below for nested schema](#nestedatt--network)) +- `retention_days` (Number) The days for how long the backup files should be stored before cleaned up. 30 to 365 +- `storage` (Attributes) The object containing information about the storage size and class. (see [below for nested schema](#nestedatt--storage)) +- `version` (String) The sqlserver version used for the instance. + +### Optional + +- `encryption` (Attributes) this defines which key to use for storage encryption (see [below for nested schema](#nestedatt--encryption)) +- `instance_id` (String) The ID of the instance. +- `project_id` (String) The STACKIT project ID. +- `region` (String) The region which should be addressed + +### Read-Only + +- `edition` (String) Edition of the MSSQL server instance +- `id` (String) The ID of the instance. +- `is_deletable` (Boolean) Whether the instance can be deleted or not. +- `replicas` (Number) How many replicas the instance should have. +- `status` (String) + + +### Nested Schema for `network` + +Required: + +- `acl` (List of String) List of IPV4 cidr. + +Optional: + +- `access_scope` (String) The network access scope of the instance + +⚠️ **Note:** This feature is in private preview. Supplying this object is only permitted for enabled accounts. If your account does not have access, the request will be rejected. + +Read-Only: + +- `instance_address` (String) +- `router_address` (String) + + + +### Nested Schema for `storage` + +Required: + +- `class` (String) The storage class for the storage. +- `size` (Number) The storage size in Gigabytes. + + + +### Nested Schema for `encryption` + +Required: + +- `kek_key_id` (String) The key identifier +- `kek_key_ring_id` (String) The keyring identifier +- `kek_key_version` (String) The key version +- `service_account` (String) diff --git a/examples/data-sources/stackitprivatepreview_postgresflexalpha_flavor/data-source.tf b/examples/data-sources/stackitprivatepreview_postgresflexalpha_flavor/data-source.tf new file mode 100644 index 00000000..67017935 --- /dev/null +++ b/examples/data-sources/stackitprivatepreview_postgresflexalpha_flavor/data-source.tf @@ -0,0 +1,8 @@ +data "stackitprivatepreview_postgresflexalpha_flavor" "flavor" { + project_id = var.project_id + region = var.region + cpu = 4 + ram = 16 + node_type = "Single" + storage_class = "premium-perf2-stackit" +} diff --git a/examples/data-sources/stackitprivatepreview_sqlserverflexalpha_flavor/data-source.tf b/examples/data-sources/stackitprivatepreview_sqlserverflexalpha_flavor/data-source.tf new file mode 100644 index 00000000..25d94537 --- /dev/null +++ b/examples/data-sources/stackitprivatepreview_sqlserverflexalpha_flavor/data-source.tf @@ -0,0 +1,8 @@ +data "stackitprivatepreview_sqlserverflexalpha_flavor" "flavor" { + project_id = var.project_id + region = var.region + cpu = 4 + ram = 16 + node_type = "Single" + storage_class = "premium-perf2-stackit" +} diff --git a/examples/data-sources/stackitprivatepreview_sqlserverflexbeta_database/data-source.tf b/examples/data-sources/stackitprivatepreview_sqlserverflexbeta_database/data-source.tf new file mode 100644 index 00000000..894fcd33 --- /dev/null +++ b/examples/data-sources/stackitprivatepreview_sqlserverflexbeta_database/data-source.tf @@ -0,0 +1,5 @@ +data "stackitprivatepreview_sqlserverflexbeta_database" "example" { + project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" + instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" + database_name = "dbname" +} diff --git a/examples/data-sources/stackitprivatepreview_sqlserverflexbeta_flavor/data-source.tf b/examples/data-sources/stackitprivatepreview_sqlserverflexbeta_flavor/data-source.tf new file mode 100644 index 00000000..f40b9680 --- /dev/null +++ b/examples/data-sources/stackitprivatepreview_sqlserverflexbeta_flavor/data-source.tf @@ -0,0 +1,8 @@ +data "stackitprivatepreview_sqlserverflexbeta_flavor" "flavor" { + project_id = var.project_id + region = var.region + cpu = 4 + ram = 16 + node_type = "Single" + storage_class = "premium-perf2-stackit" +} diff --git a/examples/data-sources/stackitprivatepreview_sqlserverflexbeta_instance/data-source.tf b/examples/data-sources/stackitprivatepreview_sqlserverflexbeta_instance/data-source.tf new file mode 100644 index 00000000..b8c8fc2b --- /dev/null +++ b/examples/data-sources/stackitprivatepreview_sqlserverflexbeta_instance/data-source.tf @@ -0,0 +1,4 @@ +data "stackitprivatepreview_sqlserverflexbeta_instance" "example" { + project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" + instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" +} diff --git a/examples/provider/provider.tf b/examples/provider/provider.tf index 1795874c..4db0aed3 100644 --- a/examples/provider/provider.tf +++ b/examples/provider/provider.tf @@ -2,14 +2,13 @@ provider "stackitprivatepreview" { default_region = "eu01" } -# Authentication - -# Token flow (scheduled for deprecation and will be removed on December 17, 2025) provider "stackitprivatepreview" { - default_region = "eu01" - service_account_token = var.service_account_token + default_region = "eu01" + service_account_key_path = "service_account.json" } +# Authentication + # Key flow provider "stackitprivatepreview" { default_region = "eu01" @@ -23,4 +22,3 @@ provider "stackitprivatepreview" { service_account_key_path = var.service_account_key_path private_key_path = var.private_key_path } - diff --git a/examples/resources/stackitprivatepreview_sqlserverflexbeta_instance/resource.tf b/examples/resources/stackitprivatepreview_sqlserverflexbeta_instance/resource.tf new file mode 100644 index 00000000..06e88f64 --- /dev/null +++ b/examples/resources/stackitprivatepreview_sqlserverflexbeta_instance/resource.tf @@ -0,0 +1,76 @@ +# without encryption and SNA +resource "stackitprivatepreview_sqlserverflexbeta_instance" "instance" { + project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" + name = "example-instance" + backup_schedule = "0 3 * * *" + retention_days = 31 + flavor_id = "flavor_id" + storage = { + class = "premium-perf2-stackit" + size = 50 + } + version = 2022 + network = { + acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"] + access_scope = "SNA" + } +} + +# without encryption and PUBLIC +resource "stackitprivatepreview_sqlserverflexbeta_instance" "instance" { + project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" + name = "example-instance" + backup_schedule = "0 3 * * *" + retention_days = 31 + flavor_id = "flavor_id" + storage = { + class = "premium-perf2-stackit" + size = 50 + } + version = 2022 + network = { + acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"] + access_scope = "PUBLIC" + } +} + +# with encryption and SNA +resource "stackitprivatepreview_sqlserverflexbeta_instance" "instance" { + project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" + name = "example-instance" + backup_schedule = "0 3 * * *" + retention_days = 31 + flavor_id = "flavor_id" + storage = { + class = "premium-perf2-stackit" + size = 50 + } + version = 2022 + encryption = { + kek_key_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" + kek_key_ring_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" + kek_key_version = 1 + service_account = "service_account@email" + } + network = { + acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"] + access_scope = "SNA" + } +} + + +# Only use the import statement, if you want to import an existing sqlserverflex instance +import { + to = stackitprivatepreview_sqlserverflexalpha_instance.import-example + id = "${var.project_id},${var.region},${var.sql_instance_id}" +} + +# import with identity +import { + to = stackitprivatepreview_sqlserverflexalpha_instance.import-example + identity = { + project_id = var.project_id + region = var.region + instance_id = var.sql_instance_id + } +} diff --git a/go.mod b/go.mod index 0815f3a3..10aca46d 100644 --- a/go.mod +++ b/go.mod @@ -29,13 +29,22 @@ require ( require ( dario.cat/mergo v1.0.1 // indirect + github.com/BurntSushi/toml v1.2.1 // indirect + github.com/Kunde21/markdownfmt/v3 v3.1.0 // indirect + github.com/Masterminds/goutils v1.1.1 // indirect + github.com/Masterminds/semver/v3 v3.2.0 // indirect + github.com/Masterminds/sprig/v3 v3.2.3 // indirect github.com/ProtonMail/go-crypto v1.3.0 // indirect github.com/agext/levenshtein v1.2.3 // indirect github.com/apparentlymart/go-textseg/v15 v15.0.0 // indirect + github.com/armon/go-radix v1.0.0 // indirect + github.com/bgentry/speakeasy v0.1.0 // indirect + github.com/bmatcuk/doublestar/v4 v4.9.1 // indirect github.com/cloudflare/circl v1.6.2 // indirect github.com/fatih/color v1.18.0 // indirect github.com/golang-jwt/jwt/v5 v5.3.0 // indirect github.com/golang/protobuf v1.5.4 // indirect + github.com/hashicorp/cli v1.1.7 // indirect github.com/hashicorp/errwrap v1.1.0 // indirect github.com/hashicorp/go-checkpoint v0.5.0 // indirect github.com/hashicorp/go-cleanhttp v0.5.2 // indirect @@ -50,27 +59,38 @@ require ( github.com/hashicorp/logutils v1.0.0 // indirect github.com/hashicorp/terraform-exec v0.24.0 // indirect github.com/hashicorp/terraform-json v0.27.2 // indirect + github.com/hashicorp/terraform-plugin-docs v0.24.0 // indirect github.com/hashicorp/terraform-plugin-sdk/v2 v2.38.1 // indirect github.com/hashicorp/terraform-registry-address v0.4.0 // indirect github.com/hashicorp/terraform-svchost v0.2.0 // indirect github.com/hashicorp/yamux v0.1.2 // indirect + github.com/huandu/xstrings v1.3.3 // indirect + github.com/imdario/mergo v0.3.15 // indirect github.com/inconshreveable/mousetrap v1.1.0 // indirect github.com/kr/text v0.2.0 // indirect github.com/mattn/go-colorable v0.1.14 // indirect github.com/mattn/go-isatty v0.0.20 // indirect + github.com/mattn/go-runewidth v0.0.9 // indirect github.com/mitchellh/copystructure v1.2.0 // indirect github.com/mitchellh/go-testing-interface v1.14.1 // indirect github.com/mitchellh/go-wordwrap v1.0.1 // indirect github.com/mitchellh/mapstructure v1.5.0 // indirect github.com/mitchellh/reflectwalk v1.0.2 // indirect github.com/oklog/run v1.2.0 // indirect + github.com/posener/complete v1.2.3 // indirect + github.com/shopspring/decimal v1.3.1 // indirect + github.com/spf13/cast v1.5.0 // indirect github.com/spf13/pflag v1.0.10 // indirect github.com/stretchr/testify v1.11.1 // indirect github.com/vmihailenco/msgpack v4.0.4+incompatible // indirect github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect + github.com/yuin/goldmark v1.7.7 // indirect + github.com/yuin/goldmark-meta v1.1.0 // indirect github.com/zclconf/go-cty v1.17.0 // indirect + go.abhg.dev/goldmark/frontmatter v0.2.0 // indirect golang.org/x/crypto v0.47.0 // indirect + golang.org/x/exp v0.0.0-20230626212559-97b1e661b5df // indirect golang.org/x/mod v0.32.0 // indirect golang.org/x/net v0.49.0 // indirect golang.org/x/sync v0.19.0 // indirect @@ -81,6 +101,7 @@ require ( google.golang.org/genproto/googleapis/rpc v0.0.0-20260120221211-b8f7ae30c516 // indirect google.golang.org/grpc v1.78.0 // indirect google.golang.org/protobuf v1.36.11 // indirect + gopkg.in/yaml.v2 v2.4.0 // indirect ) tool golang.org/x/tools/cmd/goimports diff --git a/go.sum b/go.sum index a7b3189b..59906446 100644 --- a/go.sum +++ b/go.sum @@ -1,5 +1,15 @@ dario.cat/mergo v1.0.1 h1:Ra4+bf83h2ztPIQYNP99R6m+Y7KfnARDfID+a+vLl4s= dario.cat/mergo v1.0.1/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk= +github.com/BurntSushi/toml v1.2.1 h1:9F2/+DoOYIOksmaJFPw1tGFy1eDnIJXg+UHjuD8lTak= +github.com/BurntSushi/toml v1.2.1/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ= +github.com/Kunde21/markdownfmt/v3 v3.1.0 h1:KiZu9LKs+wFFBQKhrZJrFZwtLnCCWJahL+S+E/3VnM0= +github.com/Kunde21/markdownfmt/v3 v3.1.0/go.mod h1:tPXN1RTyOzJwhfHoon9wUr4HGYmWgVxSQN6VBJDkrVc= +github.com/Masterminds/goutils v1.1.1 h1:5nUrii3FMTL5diU80unEVvNevw1nH4+ZV4DSLVJLSYI= +github.com/Masterminds/goutils v1.1.1/go.mod h1:8cTjp+g8YejhMuvIA5y2vz3BpJxksy863GQaJW2MFNU= +github.com/Masterminds/semver/v3 v3.2.0 h1:3MEsd0SM6jqZojhjLWWeBY+Kcjy9i6MQAeY7YgDP83g= +github.com/Masterminds/semver/v3 v3.2.0/go.mod h1:qvl/7zhW3nngYb5+80sSMF+FG2BjYrf8m9wsX0PNOMQ= +github.com/Masterminds/sprig/v3 v3.2.3 h1:eL2fZNezLomi0uOLqjQoN6BfsDD+fyLtgbJMAj9n6YA= +github.com/Masterminds/sprig/v3 v3.2.3/go.mod h1:rXcFaZ2zZbLRJv/xSysmlgIM1u11eBaRMhvYXJNkGuM= github.com/MatusOllah/slogcolor v1.7.0 h1:Nrd7yBPv2EBEEBEwl7WEPRmMd1ozZzw2jm8SLMYDbKs= github.com/MatusOllah/slogcolor v1.7.0/go.mod h1:5y1H50XuQIBvuYTJlmokWi+4FuPiJN5L7Z0jM4K4bYA= github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY= @@ -11,6 +21,12 @@ github.com/agext/levenshtein v1.2.3/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki github.com/apparentlymart/go-textseg/v12 v12.0.0/go.mod h1:S/4uRK2UtaQttw1GenVJEynmyUenKwP++x/+DdGV/Ec= github.com/apparentlymart/go-textseg/v15 v15.0.0 h1:uYvfpb3DyLSCGWnctWKGj857c6ew1u1fNQOlOtuGxQY= github.com/apparentlymart/go-textseg/v15 v15.0.0/go.mod h1:K8XmNZdhEBkdlyDdvbmmsvpAG721bKi0joRfFdHIWJ4= +github.com/armon/go-radix v1.0.0 h1:F4z6KzEeeQIMeLFa97iZU6vupzoecKdU5TX24SNppXI= +github.com/armon/go-radix v1.0.0/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= +github.com/bgentry/speakeasy v0.1.0 h1:ByYyxL9InA1OWqxJqqp2A5pYHUrCiAL6K3J+LKSsQkY= +github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs= +github.com/bmatcuk/doublestar/v4 v4.9.1 h1:X8jg9rRZmJd4yRy7ZeNDRnM+T3ZfHv15JiBJ/avrEXE= +github.com/bmatcuk/doublestar/v4 v4.9.1/go.mod h1:xBQ8jztBU6kakFMg+8WGxn0c6z1fTSPVIjEY1Wr7jzc= github.com/bufbuild/protocompile v0.14.1 h1:iA73zAf/fyljNjQKwYzUHD6AD4R8KMasmwa/FBatYVw= github.com/bufbuild/protocompile v0.14.1/go.mod h1:ppVdAIhbr2H8asPk6k4pY7t9zB1OU5DoEw9xY/FUi1c= github.com/cloudflare/circl v1.6.2 h1:hL7VBpHHKzrV5WTfHCaBsgx/HGbBYlgrwvNXEVDYYsQ= @@ -54,8 +70,11 @@ github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMyw github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= +github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/hashicorp/cli v1.1.7 h1:/fZJ+hNdwfTSfsxMBa9WWMlfjUZbX8/LnUxgAd7lCVU= +github.com/hashicorp/cli v1.1.7/go.mod h1:e6Mfpga9OCT1vqzFuoGZiiF/KaG9CbUfO5s3ghU3YgU= github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I= github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= @@ -68,6 +87,7 @@ github.com/hashicorp/go-cty v1.5.0 h1:EkQ/v+dDNUqnuVpmS5fPqyY71NXVgT5gf32+57xY8g github.com/hashicorp/go-cty v1.5.0/go.mod h1:lFUCG5kd8exDobgSfyj4ONE/dc822kiYMguVKdHGMLM= github.com/hashicorp/go-hclog v1.6.3 h1:Qr2kF+eVWjTiYmU7Y31tYlP1h0q/X3Nl3tPGdaB11/k= github.com/hashicorp/go-hclog v1.6.3/go.mod h1:W4Qnvbt70Wk/zYJryRzDRU/4r0kIg0PVHBcfoyhpF5M= +github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk= github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo= github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= github.com/hashicorp/go-plugin v1.7.0 h1:YghfQH/0QmPNc/AZMTFE3ac8fipZyZECHdDPshfk+mA= @@ -89,6 +109,8 @@ github.com/hashicorp/terraform-exec v0.24.0 h1:mL0xlk9H5g2bn0pPF6JQZk5YlByqSqrO5 github.com/hashicorp/terraform-exec v0.24.0/go.mod h1:lluc/rDYfAhYdslLJQg3J0oDqo88oGQAdHR+wDqFvo4= github.com/hashicorp/terraform-json v0.27.2 h1:BwGuzM6iUPqf9JYM/Z4AF1OJ5VVJEEzoKST/tRDBJKU= github.com/hashicorp/terraform-json v0.27.2/go.mod h1:GzPLJ1PLdUG5xL6xn1OXWIjteQRT2CNT9o/6A9mi9hE= +github.com/hashicorp/terraform-plugin-docs v0.24.0 h1:YNZYd+8cpYclQyXbl1EEngbld8w7/LPOm99GD5nikIU= +github.com/hashicorp/terraform-plugin-docs v0.24.0/go.mod h1:YLg+7LEwVmRuJc0EuCw0SPLxuQXw5mW8iJ5ml/kvi+o= github.com/hashicorp/terraform-plugin-framework v1.17.0 h1:JdX50CFrYcYFY31gkmitAEAzLKoBgsK+iaJjDC8OexY= github.com/hashicorp/terraform-plugin-framework v1.17.0/go.mod h1:4OUXKdHNosX+ys6rLgVlgklfxN3WHR5VHSOABeS/BM0= github.com/hashicorp/terraform-plugin-framework-validators v0.19.0 h1:Zz3iGgzxe/1XBkooZCewS0nJAaCFPFPHdNJd8FgE4Ow= @@ -107,8 +129,13 @@ github.com/hashicorp/terraform-svchost v0.2.0 h1:wVc2vMiodOHvNZcQw/3y9af1XSomgjG github.com/hashicorp/terraform-svchost v0.2.0/go.mod h1:/98rrS2yZsbppi4VGVCjwYmh8dqsKzISqK7Hli+0rcQ= github.com/hashicorp/yamux v0.1.2 h1:XtB8kyFOyHXYVFnwT5C3+Bdo8gArse7j2AQ0DA0Uey8= github.com/hashicorp/yamux v0.1.2/go.mod h1:C+zze2n6e/7wshOZep2A70/aQU6QBRWJO/G6FT1wIns= +github.com/huandu/xstrings v1.3.3 h1:/Gcsuc1x8JVbJ9/rlye4xZnVAbEkGauT8lbebqcQws4= +github.com/huandu/xstrings v1.3.3/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE= github.com/iancoleman/strcase v0.3.0 h1:nTXanmYxhfFAMjZL34Ov6gkzEsSJZ5DbhxWjvSASxEI= github.com/iancoleman/strcase v0.3.0/go.mod h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47ZCWhYzw7ho= +github.com/imdario/mergo v0.3.11/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA= +github.com/imdario/mergo v0.3.15 h1:M8XP7IuFNsqUx6VPK2P9OSmsYsI/YFaGil0uD21V3dM= +github.com/imdario/mergo v0.3.15/go.mod h1:WBLT9ZmE3lPoWsEzCh9LPo3TiwVN+ZKEjmz+hD27ysY= github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= @@ -137,6 +164,9 @@ github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Ky github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/mattn/go-runewidth v0.0.9 h1:Lm995f3rfxdpd6TSmuVCHVb/QhupuXlYr8sCI/QdE+0= +github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI= +github.com/mitchellh/copystructure v1.0.0/go.mod h1:SNtv71yrdKgLRyLFxmLdkAbkKEFWgYaq1OVrnRcwhnw= github.com/mitchellh/copystructure v1.2.0 h1:vpKXTN4ewci03Vljg/q9QvCGUDttBOGBIa15WveJJGw= github.com/mitchellh/copystructure v1.2.0/go.mod h1:qLl+cE2AmVv+CoeAwDPye/v+N2HKCj9FbZEVFJRxO9s= github.com/mitchellh/go-testing-interface v1.14.1 h1:jrgshOhYAUVNMAJiKbEu7EqAwgJJ2JqpQmpLJOu07cU= @@ -145,6 +175,7 @@ github.com/mitchellh/go-wordwrap v1.0.1 h1:TLuKupo69TCn6TQSyGxwI1EblZZEsQ0vMlAFQ github.com/mitchellh/go-wordwrap v1.0.1/go.mod h1:R62XHJLzvMFRBbcrT7m7WgmE1eOyTSsCt+hzestvNj0= github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= +github.com/mitchellh/reflectwalk v1.0.0/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= github.com/mitchellh/reflectwalk v1.0.2 h1:G2LzWKi524PWgd3mLHV8Y5k7s6XUvT0Gef6zxSIeXaQ= github.com/mitchellh/reflectwalk v1.0.2/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= github.com/oklog/run v1.2.0 h1:O8x3yXwah4A73hJdlrwo/2X6J62gE5qTMusH0dvz60E= @@ -154,13 +185,21 @@ github.com/pjbgf/sha1cd v0.3.2/go.mod h1:zQWigSxVmsHEZow5qaLtPYxpcKMMQpa09ixqBxu github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U= github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/posener/complete v1.2.3 h1:NP0eAhjcjImqslEwo/1hq7gpajME0fTLTezBKDqfXqo= +github.com/posener/complete v1.2.3/go.mod h1:WZIdtGGp+qx0sLrYKtIRAruyNpv6hFCicSgv7Sy7s/s= github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ= github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc= github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 h1:n661drycOFuPLCN3Uc8sB6B/s6Z4t2xvBgU1htSHuq8= github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3/go.mod h1:A0bzQcvG0E7Rwjx0REVgAGH58e96+X0MeOfepqsbeW4= +github.com/shopspring/decimal v1.2.0/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= +github.com/shopspring/decimal v1.3.1 h1:2Usl1nmF/WZucqkFZhnfFYxxxu8LG21F6nPQBE5gKV8= +github.com/shopspring/decimal v1.3.1/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= github.com/skeema/knownhosts v1.3.1 h1:X2osQ+RAjK76shCbvhHHHVl3ZlgDm8apHEHFqRjnBY8= github.com/skeema/knownhosts v1.3.1/go.mod h1:r7KTdC8l4uxWRyK2TpQZ/1o5HaSzh06ePQNxPwTcfiY= +github.com/spf13/cast v1.3.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= +github.com/spf13/cast v1.5.0 h1:rj3WzYc11XZaIZMPKmwP96zkFEnnAmV8s6XbB2aY32w= +github.com/spf13/cast v1.5.0/go.mod h1:SpXXQ5YoyJw6s3/6cMTQuxvgRl3PCJiyaX9p6b155UU= github.com/spf13/cobra v1.4.0/go.mod h1:Wo4iy3BUC+X2Fybo0PDqwJIv3dNRiZLHQymsfxlB84g= github.com/spf13/cobra v1.10.2 h1:DMTTonx5m65Ic0GOoRY2c16WCbHxOOw6xxezuLaBpcU= github.com/spf13/cobra v1.10.2/go.mod h1:7C1pvHqHw5A4vrJfjNwvOdzYu0Gml16OCs2GRiTUUS4= @@ -175,6 +214,9 @@ github.com/stackitcloud/stackit-sdk-go/services/iaasalpha v0.1.23-alpha/go.mod h github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex v1.4.1 h1:6MJdy1xmdE+uOo/F8mR5HSldjPSHpdhwuqS3u9m2EWQ= github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex v1.4.1/go.mod h1:XLr3ZfrT1g8ZZMm7A6RXOPBuhBkikdUN2o/+/Y+Hu+g= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= +github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= github.com/stretchr/testify v1.7.2/go.mod h1:R6va5+xMeoiuVRoj+gSkQ7d3FALtqAAGI1FQKckRals= github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U= github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U= @@ -190,10 +232,16 @@ github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV github.com/xanzy/ssh-agent v0.3.3 h1:+/15pJfg/RsTxqYcX6fHqOXZwwMP+2VyYWJeWM2qQFM= github.com/xanzy/ssh-agent v0.3.3/go.mod h1:6dzNDKs0J9rVPHPhaGCukekBHKqfl+L3KghI1Bc68Uw= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= +github.com/yuin/goldmark v1.7.7 h1:5m9rrB1sW3JUMToKFQfb+FGt1U7r57IHu5GrYrG2nqU= +github.com/yuin/goldmark v1.7.7/go.mod h1:uzxRWxtg69N339t3louHJ7+O03ezfj6PlliRlaOzY1E= +github.com/yuin/goldmark-meta v1.1.0 h1:pWw+JLHGZe8Rk0EGsMVssiNb/AaPMHfSRszZeUeiOUc= +github.com/yuin/goldmark-meta v1.1.0/go.mod h1:U4spWENafuA7Zyg+Lj5RqK/MF+ovMYtBvXi1lBb2VP0= github.com/zclconf/go-cty v1.17.0 h1:seZvECve6XX4tmnvRzWtJNHdscMtYEx5R7bnnVyd/d0= github.com/zclconf/go-cty v1.17.0/go.mod h1:wqFzcImaLTI6A5HfsRwB0nj5n0MRZFwmey8YoFPPs3U= github.com/zclconf/go-cty-debug v0.0.0-20240509010212-0d6042c53940 h1:4r45xpDWB6ZMSMNJFMOjqrGHynW3DIBuR2H9j0ug+Mo= github.com/zclconf/go-cty-debug v0.0.0-20240509010212-0d6042c53940/go.mod h1:CmBdvvj3nqzfzJ6nTCIwDTPZ56aVGvDrmztiO5g3qrM= +go.abhg.dev/goldmark/frontmatter v0.2.0 h1:P8kPG0YkL12+aYk2yU3xHv4tcXzeVnN+gU0tJ5JnxRw= +go.abhg.dev/goldmark/frontmatter v0.2.0/go.mod h1:XqrEkZuM57djk7zrlRUB02x8I5J0px76YjkOzhB4YlU= go.opentelemetry.io/auto/sdk v1.2.1 h1:jXsnJ4Lmnqd11kwkBV2LgLoFMZKizbCi5fNZ/ipaZ64= go.opentelemetry.io/auto/sdk v1.2.1/go.mod h1:KRTj+aOaElaLi+wW1kO/DZRXwkF4C5xPbEe3ZiIhN7Y= go.opentelemetry.io/otel v1.38.0 h1:RkfdswUDRimDg0m2Az18RKOsnI8UDzppJAtj01/Ymk8= @@ -209,8 +257,11 @@ go.opentelemetry.io/otel/trace v1.38.0/go.mod h1:j1P9ivuFsTceSWe1oY+EeW3sc+Pp42s go.yaml.in/yaml/v3 v3.0.4/go.mod h1:DhzuOOF2ATzADvBadXxruRBLzYTpT36CKvDb3+aBEFg= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.3.0/go.mod h1:hebNnKkNXi2UzZN1eVRvBB7co0a+JxK6XbPiWVs/3J4= golang.org/x/crypto v0.47.0 h1:V6e3FRj+n4dbpw86FJ8Fv7XVOql7TEwpHapKoMJ/GO8= golang.org/x/crypto v0.47.0/go.mod h1:ff3Y9VzzKbwSSEzWqJsJVBnWmRwRSHt/6Op5n9bQc4A= +golang.org/x/exp v0.0.0-20230626212559-97b1e661b5df h1:UA2aFVmmsIlefxMk29Dp2juaUSth8Pyn3Tq5Y5mJGME= +golang.org/x/exp v0.0.0-20230626212559-97b1e661b5df/go.mod h1:FXUEEKJgO7OQYeo8N01OfiKP8RXMtf6e8aTskBGqWdc= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= golang.org/x/mod v0.32.0 h1:9F4d3PHLljb6x//jOyokMv3eX+YDeepZSEo3mFJy93c= golang.org/x/mod v0.32.0/go.mod h1:SgipZ/3h2Ci89DlEtEXWUk/HteuRin+HHhN+WbNhguU= @@ -218,6 +269,7 @@ golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= +golang.org/x/net v0.2.0/go.mod h1:KqCZLdyyvdV855qA2rE3GC2aiw5xGR5TEjj8smXukLY= golang.org/x/net v0.49.0 h1:eeHFmOGUTtaaPSGNmjBKpbng9MulQsJURQUAfUwY++o= golang.org/x/net v0.49.0/go.mod h1:/ysNB2EvaqvesRkuLAyjI1ycPZlQHM3q01F02UY/MV8= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -235,6 +287,7 @@ golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20220503163025-988cb79eb6c6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.40.0 h1:DBZZqJ2Rkml6QMQsZywtnjnnGvHza6BTfYFWY9kjEWQ= golang.org/x/sys v0.40.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= @@ -242,12 +295,14 @@ golang.org/x/telemetry v0.0.0-20260116145544-c6413dc483f5 h1:i0p03B68+xC1kD2QUO8 golang.org/x/telemetry v0.0.0-20260116145544-c6413dc483f5/go.mod h1:b7fPSJ0pKZ3ccUh8gnTONJxhn3c/PS6tyzQvyqw4iA8= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= +golang.org/x/term v0.2.0/go.mod h1:TVmDHMZPmdnySmBfhjOoOdhjzdE1h4u1VwSiw2l1Nuc= golang.org/x/term v0.39.0 h1:RclSuaJf32jOqZz74CkPA9qFuVTX7vhLlpfj/IGWlqY= golang.org/x/term v0.39.0/go.mod h1:yxzUCTP/U+FzoxfdKmLaA0RV1WgE0VY7hXBwKtY/4ww= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ= +golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.33.0 h1:B3njUFyqtHDUI5jMn1YIr5B0IE2U0qck04r6d4KPAxE= golang.org/x/text v0.33.0/go.mod h1:LuMebE6+rBincTi9+xWTY8TztLzKHc/9C1uBCG27+q8= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= @@ -276,6 +331,9 @@ gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntN gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= gopkg.in/warnings.v0 v0.1.2 h1:wFXVbFY8DY5/xOe1ECiWdKCzZlxgshcYVNkBHstARME= gopkg.in/warnings.v0 v0.1.2/go.mod h1:jksf8JmL6Qr/oQM2OXTHunEvvTAsrWBLb6OOjuVWRNI= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/sample/sqlserver/sqlserver.tf b/sample/sqlserver/sqlserver.tf index 847678bb..bf6a88d1 100644 --- a/sample/sqlserver/sqlserver.tf +++ b/sample/sqlserver/sqlserver.tf @@ -46,6 +46,34 @@ resource "stackitprivatepreview_sqlserverflexalpha_instance" "msh-sna-001" { } } +resource "stackitprivatepreview_sqlserverflexalpha_instance" "msh-sna-101" { + project_id = var.project_id + name = "msh-sna-101" + backup_schedule = "0 3 * * *" + retention_days = 31 + flavor_id = data.stackitprivatepreview_sqlserverflexalpha_flavor.sqlserver_flavor.flavor_id + storage = { + class = "premium-perf2-stackit" + size = 50 + } + version = 2022 + encryption = { + #key_id = stackit_kms_key.key.key_id + #keyring_id = stackit_kms_keyring.keyring.keyring_id + #key_version = 1 + # key with scope public + kek_key_id = "fe039bcf-8d7b-431a-801d-9e81371a6b7b" + # key_id = var.key_id + kek_key_ring_id = var.keyring_id + kek_key_version = var.key_version + service_account = var.sa_email + } + network = { + acl = ["0.0.0.0/0", "193.148.160.0/19"] + access_scope = "SNA" + } +} + resource "stackitprivatepreview_sqlserverflexalpha_instance" "msh-nosna-001" { project_id = var.project_id name = "msh-nosna-001" diff --git a/service_specs/sqlserverflex/alpha/collation_config.yml.disabled b/service_specs/sqlserverflex/alpha/collation_config.yml.bak similarity index 92% rename from service_specs/sqlserverflex/alpha/collation_config.yml.disabled rename to service_specs/sqlserverflex/alpha/collation_config.yml.bak index 9cb13c19..9ebfe5b4 100644 --- a/service_specs/sqlserverflex/alpha/collation_config.yml.disabled +++ b/service_specs/sqlserverflex/alpha/collation_config.yml.bak @@ -2,7 +2,7 @@ provider: name: stackitprivatepreview data_sources: - collation: + collations: read: path: /v3alpha1/projects/{projectId}/regions/{region}/instances/{instanceId}/collations method: GET diff --git a/service_specs/sqlserverflex/alpha/database_config.yml b/service_specs/sqlserverflex/alpha/database_config.yml index e8ea6ef9..cd592e80 100644 --- a/service_specs/sqlserverflex/alpha/database_config.yml +++ b/service_specs/sqlserverflex/alpha/database_config.yml @@ -1,13 +1,8 @@ - provider: name: stackitprivatepreview resources: database: - schema: - attributes: - aliases: - id: database_id create: path: /v3alpha1/projects/{projectId}/regions/{region}/instances/{instanceId}/databases method: POST @@ -17,6 +12,10 @@ resources: delete: path: /v3alpha1/projects/{projectId}/regions/{region}/instances/{instanceId}/databases/{databaseName} method: DELETE + schema: + attributes: + aliases: + id: databaseId data_sources: @@ -26,9 +25,10 @@ data_sources: method: GET database: - attributes: - aliases: - id: database_id read: path: /v3alpha1/projects/{projectId}/regions/{region}/instances/{instanceId}/databases/{databaseName} method: GET + schema: + attributes: + aliases: + id: database_id diff --git a/service_specs/sqlserverflex/alpha/version_config.yml b/service_specs/sqlserverflex/alpha/version_config.yml.bak similarity index 92% rename from service_specs/sqlserverflex/alpha/version_config.yml rename to service_specs/sqlserverflex/alpha/version_config.yml.bak index 3a3f982d..937dccd5 100644 --- a/service_specs/sqlserverflex/alpha/version_config.yml +++ b/service_specs/sqlserverflex/alpha/version_config.yml.bak @@ -3,7 +3,7 @@ provider: name: stackitprivatepreview data_sources: - version: + versions: read: path: /v3alpha1/projects/{projectId}/regions/{region}/versions method: GET diff --git a/service_specs/sqlserverflex/beta/backup_config.yml.disabled b/service_specs/sqlserverflex/beta/backup_config.yml.disabled new file mode 100644 index 00000000..7df5fc4b --- /dev/null +++ b/service_specs/sqlserverflex/beta/backup_config.yml.disabled @@ -0,0 +1,13 @@ +provider: + name: stackitprivatepreview + +data_sources: + backups: + read: + path: /v3beta1/projects/{projectId}/regions/{region}/instances/{instanceId}/backups + method: GET + + backup: + read: + path: /v3beta1/projects/{projectId}/regions/{region}/instances/{instanceId}/backups/{backupId} + method: GET diff --git a/service_specs/sqlserverflex/beta/collation_config.yml.disabled b/service_specs/sqlserverflex/beta/collation_config.yml.disabled new file mode 100644 index 00000000..d1160ec3 --- /dev/null +++ b/service_specs/sqlserverflex/beta/collation_config.yml.disabled @@ -0,0 +1,8 @@ +provider: + name: stackitprivatepreview + +data_sources: + collation: + read: + path: /v3beta1/projects/{projectId}/regions/{region}/instances/{instanceId}/collations + method: GET diff --git a/service_specs/sqlserverflex/beta/database_config.yml b/service_specs/sqlserverflex/beta/database_config.yml new file mode 100644 index 00000000..d886fc20 --- /dev/null +++ b/service_specs/sqlserverflex/beta/database_config.yml @@ -0,0 +1,34 @@ + +provider: + name: stackitprivatepreview + +resources: + database: + schema: + attributes: + aliases: + id: databaseId + create: + path: /v3beta1/projects/{projectId}/regions/{region}/instances/{instanceId}/databases + method: POST + read: + path: /v3beta1/projects/{projectId}/regions/{region}/instances/{instanceId}/databases/{databaseName} + method: GET + delete: + path: /v3beta1/projects/{projectId}/regions/{region}/instances/{instanceId}/databases/{databaseName} + method: DELETE + + +data_sources: + databases: + read: + path: /v3beta1/projects/{projectId}/regions/{region}/instances/{instanceId}/databases + method: GET + + database: + attributes: + aliases: + id: database_id + read: + path: /v3beta1/projects/{projectId}/regions/{region}/instances/{instanceId}/databases/{databaseName} + method: GET diff --git a/service_specs/sqlserverflex/beta/flavors_config.yml b/service_specs/sqlserverflex/beta/flavors_config.yml new file mode 100644 index 00000000..4b985a4c --- /dev/null +++ b/service_specs/sqlserverflex/beta/flavors_config.yml @@ -0,0 +1,9 @@ + +provider: + name: stackitprivatepreview + +data_sources: + flavors: + read: + path: /v3beta1/projects/{projectId}/regions/{region}/flavors + method: GET diff --git a/service_specs/sqlserverflex/beta/instance_config.yml b/service_specs/sqlserverflex/beta/instance_config.yml new file mode 100644 index 00000000..cea25959 --- /dev/null +++ b/service_specs/sqlserverflex/beta/instance_config.yml @@ -0,0 +1,28 @@ +provider: + name: stackitprivatepreview + +resources: + instance: + create: + path: /v3beta1/projects/{projectId}/regions/{region}/instances + method: POST + read: + path: /v3beta1/projects/{projectId}/regions/{region}/instances/{instanceId} + method: GET + update: + path: /v3beta1/projects/{projectId}/regions/{region}/instances/{instanceId} + method: PUT + delete: + path: /v3beta1/projects/{projectId}/regions/{region}/instances/{instanceId} + method: DELETE + +data_sources: + instances: + read: + path: /v3beta1/projects/{projectId}/regions/{region}/instances + method: GET + + instance: + read: + path: /v3beta1/projects/{projectId}/regions/{region}/instances/{instanceId} + method: GET diff --git a/service_specs/sqlserverflex/beta/user_config.yml b/service_specs/sqlserverflex/beta/user_config.yml new file mode 100644 index 00000000..bfa9a3a7 --- /dev/null +++ b/service_specs/sqlserverflex/beta/user_config.yml @@ -0,0 +1,24 @@ + +provider: + name: stackitprivatepreview + +resources: + user: + create: + path: /v3beta1/projects/{projectId}/regions/{region}/instances/{instanceId}/users + method: POST + read: + path: /v3beta1/projects/{projectId}/regions/{region}/instances/{instanceId}/users/{userId} + method: GET + update: + path: /v3beta1/projects/{projectId}/regions/{region}/instances/{instanceId}/users/{userId} + method: PUT + delete: + path: /v3beta1/projects/{projectId}/regions/{region}/instances/{instanceId}/users/{userId} + method: DELETE + +data_sources: + user: + read: + path: /v3beta1/projects/{projectId}/regions/{region}/instances/{instanceId}/users + method: GET diff --git a/service_specs/sqlserverflex/beta/version_config.yml.bak b/service_specs/sqlserverflex/beta/version_config.yml.bak new file mode 100644 index 00000000..70d79676 --- /dev/null +++ b/service_specs/sqlserverflex/beta/version_config.yml.bak @@ -0,0 +1,9 @@ + +provider: + name: stackitprivatepreview + +data_sources: + version: + read: + path: /v3beta1/projects/{projectId}/regions/{region}/versions + method: GET diff --git a/stackit/internal/services/sqlserverflexalpha/instance/datasource.go b/stackit/internal/services/sqlserverflexalpha/instance/datasource.go index 9765d99a..3f8f787e 100644 --- a/stackit/internal/services/sqlserverflexalpha/instance/datasource.go +++ b/stackit/internal/services/sqlserverflexalpha/instance/datasource.go @@ -1,6 +1,6 @@ // Copyright (c) STACKIT -package sqlserverflex +package sqlserverflexalpha import ( "context" @@ -218,6 +218,7 @@ func (r *instanceDataSource) Schema(ctx context.Context, _ datasource.SchemaRequ // Read refreshes the Terraform state with the latest data. func (r *instanceDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { // nolint:gocritic // function signature required by Terraform + //var model sqlserverflexalpha2.InstanceModel var model sqlserverflexalpha2.InstanceModel diags := req.Config.Get(ctx, &model) resp.Diagnostics.Append(diags...) diff --git a/stackit/internal/services/sqlserverflexalpha/instance/functions.go b/stackit/internal/services/sqlserverflexalpha/instance/functions.go index ee75cd21..783d95e1 100644 --- a/stackit/internal/services/sqlserverflexalpha/instance/functions.go +++ b/stackit/internal/services/sqlserverflexalpha/instance/functions.go @@ -1,4 +1,4 @@ -package sqlserverflex +package sqlserverflexalpha import ( "context" @@ -113,152 +113,6 @@ func handleEncryption( return enc } -//func mapFields( -// ctx context.Context, -// resp *sqlserverflex.GetInstanceResponse, -// model *Model, -// storage *storageModel, -// encryption *encryptionModel, -// network *networkModel, -// region string, -//) error { -// if resp == nil { -// return fmt.Errorf("response input is nil") -// } -// if model == nil { -// return fmt.Errorf("model input is nil") -// } -// instance := resp -// -// var instanceId string -// if model.InstanceId.ValueString() != "" { -// instanceId = model.InstanceId.ValueString() -// } else if instance.Id != nil { -// instanceId = *instance.Id -// } else { -// return fmt.Errorf("instance id not present") -// } -// -// var storageValues map[string]attr.Value -// if instance.Storage == nil { -// storageValues = map[string]attr.Value{ -// "class": storage.Class, -// "size": storage.Size, -// } -// } else { -// storageValues = map[string]attr.Value{ -// "class": types.StringValue(*instance.Storage.Class), -// "size": types.Int64PointerValue(instance.Storage.Size), -// } -// } -// storageObject, diags := types.ObjectValue(storageTypes, storageValues) -// if diags.HasError() { -// return fmt.Errorf("creating storage: %w", core.DiagsToError(diags)) -// } -// -// var encryptionValues map[string]attr.Value -// if instance.Encryption == nil { -// encryptionValues = map[string]attr.Value{ -// "keyring_id": encryption.KeyRingId, -// "key_id": encryption.KeyId, -// "key_version": encryption.KeyVersion, -// "service_account": encryption.ServiceAccount, -// } -// } else { -// encryptionValues = map[string]attr.Value{ -// "keyring_id": types.StringValue(*instance.Encryption.KekKeyRingId), -// "key_id": types.StringValue(*instance.Encryption.KekKeyId), -// "key_version": types.StringValue(*instance.Encryption.KekKeyVersion), -// "service_account": types.StringValue(*instance.Encryption.ServiceAccount), -// } -// } -// encryptionObject, diags := types.ObjectValue(encryptionTypes, encryptionValues) -// if diags.HasError() { -// return fmt.Errorf("creating encryption: %w", core.DiagsToError(diags)) -// } -// -// var networkValues map[string]attr.Value -// if instance.Network == nil { -// networkValues = map[string]attr.Value{ -// "acl": network.ACL, -// "access_scope": network.AccessScope, -// "instance_address": network.InstanceAddress, -// "router_address": network.RouterAddress, -// } -// } else { -// aclList, diags := types.ListValueFrom(ctx, types.StringType, *instance.Network.Acl) -// if diags.HasError() { -// return fmt.Errorf("creating network (acl list): %w", core.DiagsToError(diags)) -// } -// -// var routerAddress string -// if instance.Network.RouterAddress != nil { -// routerAddress = *instance.Network.RouterAddress -// diags.AddWarning("field missing while mapping fields", "router_address was empty in API response") -// } -// if instance.Network.InstanceAddress == nil { -// return fmt.Errorf("creating network: no instance address returned") -// } -// networkValues = map[string]attr.Value{ -// "acl": aclList, -// "access_scope": types.StringValue(string(*instance.Network.AccessScope)), -// "instance_address": types.StringValue(*instance.Network.InstanceAddress), -// "router_address": types.StringValue(routerAddress), -// } -// } -// networkObject, diags := types.ObjectValue(networkTypes, networkValues) -// if diags.HasError() { -// return fmt.Errorf("creating network: %w", core.DiagsToError(diags)) -// } -// -// simplifiedModelBackupSchedule := utils.SimplifyBackupSchedule(model.BackupSchedule.ValueString()) -// // If the value returned by the API is different from the one in the model after simplification, -// // we update the model so that it causes an error in Terraform -// if simplifiedModelBackupSchedule != types.StringPointerValue(instance.BackupSchedule).ValueString() { -// model.BackupSchedule = types.StringPointerValue(instance.BackupSchedule) -// } -// -// if instance.Replicas == nil { -// return fmt.Errorf("instance has no replicas set") -// } -// -// if instance.RetentionDays == nil { -// return fmt.Errorf("instance has no retention days set") -// } -// -// if instance.Version == nil { -// return fmt.Errorf("instance has no version set") -// } -// -// if instance.Edition == nil { -// return fmt.Errorf("instance has no edition set") -// } -// -// if instance.Status == nil { -// return fmt.Errorf("instance has no status set") -// } -// -// if instance.IsDeletable == nil { -// return fmt.Errorf("instance has no IsDeletable set") -// } -// -// model.Id = utils.BuildInternalTerraformId(model.ProjectId.ValueString(), region, instanceId) -// model.InstanceId = types.StringValue(instanceId) -// model.Name = types.StringPointerValue(instance.Name) -// model.FlavorId = types.StringPointerValue(instance.FlavorId) -// model.Replicas = types.Int64Value(int64(*instance.Replicas)) -// model.Storage = storageObject -// model.Version = types.StringValue(string(*instance.Version)) -// model.Edition = types.StringValue(string(*instance.Edition)) -// model.Region = types.StringValue(region) -// model.Encryption = encryptionObject -// model.Network = networkObject -// model.RetentionDays = types.Int64Value(*instance.RetentionDays) -// model.Status = types.StringValue(string(*instance.Status)) -// model.IsDeletable = types.BoolValue(*instance.IsDeletable) -// return nil -//} - func toCreatePayload( ctx context.Context, model *sqlserverflexResGen.InstanceModel, @@ -313,52 +167,6 @@ func toCreatePayload( }, nil } -////nolint:unused // TODO: remove if not needed later -//func toUpdatePartiallyPayload( -// model *Model, -// storage *storageModel, -// network *networkModel, -//) (*sqlserverflex.UpdateInstancePartiallyRequestPayload, error) { -// if model == nil { -// return nil, fmt.Errorf("nil model") -// } -// -// storagePayload := &sqlserverflex.UpdateInstanceRequestPayloadGetStorageArgType{} -// if storage != nil { -// storagePayload.Size = conversion.Int64ValueToPointer(storage.Size) -// } -// -// var aclElements []string -// if network != nil && !network.ACL.IsNull() && !network.ACL.IsUnknown() { -// aclElements = make([]string, 0, len(network.ACL.Elements())) -// diags := network.ACL.ElementsAs(context.TODO(), &aclElements, false) -// if diags.HasError() { -// return nil, fmt.Errorf("creating network: %w", core.DiagsToError(diags)) -// } -// } -// -// networkPayload := &sqlserverflex.UpdateInstancePartiallyRequestPayloadGetNetworkArgType{} -// if network != nil { -// networkPayload.AccessScope = sqlserverflex.UpdateInstancePartiallyRequestPayloadNetworkGetAccessScopeAttributeType(conversion.StringValueToPointer(network.AccessScope)) -// networkPayload.Acl = &aclElements -// } -// -// if model.Replicas.ValueInt64() > math.MaxInt32 { -// return nil, fmt.Errorf("replica count too big: %d", model.Replicas.ValueInt64()) -// } -// replCount := int32(model.Replicas.ValueInt64()) // nolint:gosec // check is performed above -// return &sqlserverflex.UpdateInstancePartiallyRequestPayload{ -// BackupSchedule: conversion.StringValueToPointer(model.BackupSchedule), -// FlavorId: conversion.StringValueToPointer(model.FlavorId), -// Name: conversion.StringValueToPointer(model.Name), -// Network: networkPayload, -// Replicas: sqlserverflex.UpdateInstancePartiallyRequestPayloadGetReplicasAttributeType(&replCount), -// RetentionDays: conversion.Int64ValueToPointer(model.RetentionDays), -// Storage: storagePayload, -// Version: sqlserverflex.UpdateInstancePartiallyRequestPayloadGetVersionAttributeType(conversion.StringValueToPointer(model.Version)), -// }, nil -//} - // TODO: check func with his args func toUpdatePayload( ctx context.Context, @@ -380,9 +188,8 @@ func toUpdatePayload( BackupSchedule: m.BackupSchedule.ValueStringPointer(), FlavorId: m.FlavorId.ValueStringPointer(), Name: m.Name.ValueStringPointer(), - Network: &sqlserverflex.CreateInstanceRequestPayloadNetwork{ - AccessScope: sqlserverflex.CreateInstanceRequestPayloadNetworkGetAccessScopeAttributeType(m.Network.AccessScope.ValueStringPointer()), - Acl: &netAcl, + Network: &sqlserverflex.UpdateInstanceRequestPayloadNetwork{ + Acl: &netAcl, }, Replicas: &replVal, RetentionDays: m.RetentionDays.ValueInt64Pointer(), diff --git a/stackit/internal/services/sqlserverflexalpha/instance/resource.go b/stackit/internal/services/sqlserverflexalpha/instance/resource.go index 66436cac..9257c8df 100644 --- a/stackit/internal/services/sqlserverflexalpha/instance/resource.go +++ b/stackit/internal/services/sqlserverflexalpha/instance/resource.go @@ -1,6 +1,6 @@ // Copyright (c) STACKIT -package sqlserverflex +package sqlserverflexalpha import ( "context" @@ -592,7 +592,6 @@ func (r *instanceResource) Read( ctx = core.LogResponse(ctx) // Map response body to schema - // err = mapFields(ctx, instanceResp, &model, storage, encryption, network, region) err = mapResponseToModel(ctx, instanceResp, &model, resp.Diagnostics) if err != nil { core.LogAndAddError( @@ -755,6 +754,7 @@ func (r *instanceResource) ImportState( req resource.ImportStateRequest, resp *resource.ImportStateResponse, ) { + // TODO idParts := strings.Split(req.ID, core.Separator) if len(idParts) != 3 || idParts[0] == "" || idParts[1] == "" || idParts[2] == "" { diff --git a/stackit/internal/services/sqlserverflexalpha/instance/resource_test.go b/stackit/internal/services/sqlserverflexalpha/instance/resource_test.go index 7768f1e9..175711ff 100644 --- a/stackit/internal/services/sqlserverflexalpha/instance/resource_test.go +++ b/stackit/internal/services/sqlserverflexalpha/instance/resource_test.go @@ -1,18 +1,4 @@ -// Copyright (c) STACKIT - -package sqlserverflex - -import ( - "context" - "testing" - - "github.com/google/go-cmp/cmp" - "github.com/hashicorp/terraform-plugin-framework/attr" - "github.com/hashicorp/terraform-plugin-framework/types" - "github.com/hashicorp/terraform-plugin-framework/types/basetypes" - "github.com/stackitcloud/stackit-sdk-go/core/utils" - sqlserverflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha" -) +package sqlserverflexalpha // type sqlserverflexClientMocked struct { // returnError bool @@ -27,343 +13,343 @@ import ( // return c.listFlavorsResp, nil // } -func TestMapFields(t *testing.T) { - t.Skip("Skipping - needs refactoring") - const testRegion = "region" - tests := []struct { - description string - state Model - input *sqlserverflex.GetInstanceResponse - storage *storageModel - encryption *encryptionModel - network *networkModel - region string - expected Model - isValid bool - }{ - { - "default_values", - Model{ - InstanceId: types.StringValue("iid"), - ProjectId: types.StringValue("pid"), - Replicas: types.Int64Value(1), - RetentionDays: types.Int64Value(1), - Version: types.StringValue("v1"), - Edition: types.StringValue("edition 1"), - Status: types.StringValue("status"), - IsDeletable: types.BoolValue(true), - }, - &sqlserverflex.GetInstanceResponse{ - FlavorId: utils.Ptr("flavor_id"), - Replicas: sqlserverflex.GetInstanceResponseGetReplicasAttributeType(utils.Ptr(int32(1))), - RetentionDays: utils.Ptr(int64(1)), - Version: sqlserverflex.GetInstanceResponseGetVersionAttributeType(utils.Ptr("v1")), - Edition: sqlserverflex.GetInstanceResponseGetEditionAttributeType(utils.Ptr("edition 1")), - Status: sqlserverflex.GetInstanceResponseGetStatusAttributeType(utils.Ptr("status")), - IsDeletable: utils.Ptr(true), - }, - &storageModel{}, - &encryptionModel{}, - &networkModel{ - ACL: types.ListNull(basetypes.StringType{}), - }, - testRegion, - Model{ - Id: types.StringValue("pid,region,iid"), - InstanceId: types.StringValue("iid"), - ProjectId: types.StringValue("pid"), - Name: types.StringNull(), - BackupSchedule: types.StringNull(), - Replicas: types.Int64Value(1), - Storage: types.ObjectValueMust(storageTypes, map[string]attr.Value{ - "class": types.StringNull(), - "size": types.Int64Null(), - }), - Encryption: types.ObjectValueMust(encryptionTypes, map[string]attr.Value{ - "keyring_id": types.StringNull(), - "key_id": types.StringNull(), - "key_version": types.StringNull(), - "service_account": types.StringNull(), - }), - Network: types.ObjectValueMust(networkTypes, map[string]attr.Value{ - "acl": types.ListNull(types.StringType), - "access_scope": types.StringNull(), - "instance_address": types.StringNull(), - "router_address": types.StringNull(), - }), - IsDeletable: types.BoolValue(true), - Edition: types.StringValue("edition 1"), - Status: types.StringValue("status"), - RetentionDays: types.Int64Value(1), - Version: types.StringValue("v1"), - Region: types.StringValue(testRegion), - }, - true, - }, - { - "simple_values", - Model{ - InstanceId: types.StringValue("iid"), - ProjectId: types.StringValue("pid"), - }, - &sqlserverflex.GetInstanceResponse{ - BackupSchedule: utils.Ptr("schedule"), - FlavorId: utils.Ptr("flavor_id"), - Id: utils.Ptr("iid"), - Name: utils.Ptr("name"), - Replicas: sqlserverflex.GetInstanceResponseGetReplicasAttributeType(utils.Ptr(int32(56))), - Status: sqlserverflex.GetInstanceResponseGetStatusAttributeType(utils.Ptr("status")), - Storage: &sqlserverflex.Storage{ - Class: utils.Ptr("class"), - Size: utils.Ptr(int64(78)), - }, - Edition: sqlserverflex.GetInstanceResponseGetEditionAttributeType(utils.Ptr("edition")), - RetentionDays: utils.Ptr(int64(1)), - Version: sqlserverflex.GetInstanceResponseGetVersionAttributeType(utils.Ptr("version")), - IsDeletable: utils.Ptr(true), - Encryption: nil, - Network: &sqlserverflex.InstanceNetwork{ - AccessScope: nil, - Acl: &[]string{ - "ip1", - "ip2", - "", - }, - InstanceAddress: nil, - RouterAddress: nil, - }, - }, - &storageModel{}, - &encryptionModel{}, - &networkModel{ - ACL: types.ListValueMust(basetypes.StringType{}, []attr.Value{ - types.StringValue("ip1"), - types.StringValue("ip2"), - types.StringValue(""), - }), - }, - testRegion, - Model{ - Id: types.StringValue("pid,region,iid"), - InstanceId: types.StringValue("iid"), - ProjectId: types.StringValue("pid"), - Name: types.StringValue("name"), - BackupSchedule: types.StringValue("schedule"), - Replicas: types.Int64Value(56), - Storage: types.ObjectValueMust(storageTypes, map[string]attr.Value{ - "class": types.StringValue("class"), - "size": types.Int64Value(78), - }), - Network: types.ObjectValueMust(networkTypes, map[string]attr.Value{ - "acl": types.ListValueMust(types.StringType, []attr.Value{ - types.StringValue("ip1"), - types.StringValue("ip2"), - types.StringValue(""), - }), - "access_scope": types.StringNull(), - "instance_address": types.StringNull(), - "router_address": types.StringNull(), - }), - Edition: types.StringValue("edition"), - RetentionDays: types.Int64Value(1), - Version: types.StringValue("version"), - Region: types.StringValue(testRegion), - IsDeletable: types.BoolValue(true), - Encryption: types.ObjectValueMust(encryptionTypes, map[string]attr.Value{ - "keyring_id": types.StringNull(), - "key_id": types.StringNull(), - "key_version": types.StringNull(), - "service_account": types.StringNull(), - }), - Status: types.StringValue("status"), - }, - true, - }, - // { - // "simple_values_no_flavor_and_storage", - // Model{ - // InstanceId: types.StringValue("iid"), - // ProjectId: types.StringValue("pid"), - // }, - // &sqlserverflex.GetInstanceResponse{ - // Acl: &[]string{ - // "ip1", - // "ip2", - // "", - // }, - // BackupSchedule: utils.Ptr("schedule"), - // FlavorId: nil, - // Id: utils.Ptr("iid"), - // Name: utils.Ptr("name"), - // Replicas: sqlserverflex.GetInstanceResponseGetReplicasAttributeType(utils.Ptr(int32(56))), - // Status: sqlserverflex.GetInstanceResponseGetStatusAttributeType(utils.Ptr("status")), - // Storage: nil, - // Edition: sqlserverflex.GetInstanceResponseGetEditionAttributeType(utils.Ptr("edition")), - // RetentionDays: utils.Ptr(int64(1)), - // Version: sqlserverflex.GetInstanceResponseGetVersionAttributeType(utils.Ptr("version")), - // }, - // &flavorModel{ - // CPU: types.Int64Value(12), - // RAM: types.Int64Value(34), - // }, - // &storageModel{ - // Class: types.StringValue("class"), - // Size: types.Int64Value(78), - // }, - // &optionsModel{ - // Edition: types.StringValue("edition"), - // RetentionDays: types.Int64Value(1), - // }, - // testRegion, - // Model{ - // Id: types.StringValue("pid,region,iid"), - // InstanceId: types.StringValue("iid"), - // ProjectId: types.StringValue("pid"), - // Name: types.StringValue("name"), - // ACL: types.ListValueMust(types.StringType, []attr.Value{ - // types.StringValue("ip1"), - // types.StringValue("ip2"), - // types.StringValue(""), - // }), - // BackupSchedule: types.StringValue("schedule"), - // Flavor: types.ObjectValueMust(flavorTypes, map[string]attr.Value{ - // "id": types.StringNull(), - // "description": types.StringNull(), - // "cpu": types.Int64Value(12), - // "ram": types.Int64Value(34), - // }), - // Replicas: types.Int64Value(56), - // Storage: types.ObjectValueMust(storageTypes, map[string]attr.Value{ - // "class": types.StringValue("class"), - // "size": types.Int64Value(78), - // }), - // Options: types.ObjectValueMust(optionsTypes, map[string]attr.Value{ - // "edition": types.StringValue("edition"), - // "retention_days": types.Int64Value(1), - // }), - // Version: types.StringValue("version"), - // Region: types.StringValue(testRegion), - // }, - // true, - // }, - // { - // "acls_unordered", - // Model{ - // InstanceId: types.StringValue("iid"), - // ProjectId: types.StringValue("pid"), - // ACL: types.ListValueMust(types.StringType, []attr.Value{ - // types.StringValue("ip2"), - // types.StringValue(""), - // types.StringValue("ip1"), - // }), - // }, - // &sqlserverflex.GetInstanceResponse{ - // Acl: &[]string{ - // "", - // "ip1", - // "ip2", - // }, - // BackupSchedule: utils.Ptr("schedule"), - // FlavorId: nil, - // Id: utils.Ptr("iid"), - // Name: utils.Ptr("name"), - // Replicas: sqlserverflex.GetInstanceResponseGetReplicasAttributeType(utils.Ptr(int32(56))), - // Status: sqlserverflex.GetInstanceResponseGetStatusAttributeType(utils.Ptr("status")), - // Storage: nil, - // //Options: &map[string]string{ - // // "edition": "edition", - // // "retentionDays": "1", - // //}, - // Version: sqlserverflex.GetInstanceResponseGetVersionAttributeType(utils.Ptr("version")), - // }, - // &flavorModel{ - // CPU: types.Int64Value(12), - // RAM: types.Int64Value(34), - // }, - // &storageModel{ - // Class: types.StringValue("class"), - // Size: types.Int64Value(78), - // }, - // &optionsModel{}, - // testRegion, - // Model{ - // Id: types.StringValue("pid,region,iid"), - // InstanceId: types.StringValue("iid"), - // ProjectId: types.StringValue("pid"), - // Name: types.StringValue("name"), - // ACL: types.ListValueMust(types.StringType, []attr.Value{ - // types.StringValue("ip2"), - // types.StringValue(""), - // types.StringValue("ip1"), - // }), - // BackupSchedule: types.StringValue("schedule"), - // Flavor: types.ObjectValueMust(flavorTypes, map[string]attr.Value{ - // "id": types.StringNull(), - // "description": types.StringNull(), - // "cpu": types.Int64Value(12), - // "ram": types.Int64Value(34), - // }), - // Replicas: types.Int64Value(56), - // Storage: types.ObjectValueMust(storageTypes, map[string]attr.Value{ - // "class": types.StringValue("class"), - // "size": types.Int64Value(78), - // }), - // Options: types.ObjectValueMust(optionsTypes, map[string]attr.Value{ - // "edition": types.StringValue("edition"), - // "retention_days": types.Int64Value(1), - // }), - // Version: types.StringValue("version"), - // Region: types.StringValue(testRegion), - // }, - // true, - // }, - // { - // "nil_response", - // Model{ - // InstanceId: types.StringValue("iid"), - // ProjectId: types.StringValue("pid"), - // }, - // nil, - // &flavorModel{}, - // &storageModel{}, - // &optionsModel{}, - // testRegion, - // Model{}, - // false, - // }, - // { - // "no_resource_id", - // Model{ - // InstanceId: types.StringValue("iid"), - // ProjectId: types.StringValue("pid"), - // }, - // &sqlserverflex.GetInstanceResponse{}, - // &flavorModel{}, - // &storageModel{}, - // &optionsModel{}, - // testRegion, - // Model{}, - // false, - // }, - } - for _, tt := range tests { - t.Run(tt.description, func(t *testing.T) { - err := mapFields(context.Background(), tt.input, &tt.state, tt.storage, tt.encryption, tt.network, tt.region) - if !tt.isValid && err == nil { - t.Fatalf("Should have failed") - } - if tt.isValid && err != nil { - t.Fatalf("Should not have failed: %v", err) - } - if tt.isValid { - diff := cmp.Diff(tt.state, tt.expected) - if diff != "" { - t.Fatalf("Data does not match: %s", diff) - } - } - }) - } -} +//func TestMapFields(t *testing.T) { +// t.Skip("Skipping - needs refactoring") +// const testRegion = "region" +// tests := []struct { +// description string +// state Model +// input *sqlserverflex.GetInstanceResponse +// storage *storageModel +// encryption *encryptionModel +// network *networkModel +// region string +// expected Model +// isValid bool +// }{ +// { +// "default_values", +// Model{ +// InstanceId: types.StringValue("iid"), +// ProjectId: types.StringValue("pid"), +// Replicas: types.Int64Value(1), +// RetentionDays: types.Int64Value(1), +// Version: types.StringValue("v1"), +// Edition: types.StringValue("edition 1"), +// Status: types.StringValue("status"), +// IsDeletable: types.BoolValue(true), +// }, +// &sqlserverflex.GetInstanceResponse{ +// FlavorId: utils.Ptr("flavor_id"), +// Replicas: sqlserverflex.GetInstanceResponseGetReplicasAttributeType(utils.Ptr(int32(1))), +// RetentionDays: utils.Ptr(int64(1)), +// Version: sqlserverflex.GetInstanceResponseGetVersionAttributeType(utils.Ptr("v1")), +// Edition: sqlserverflex.GetInstanceResponseGetEditionAttributeType(utils.Ptr("edition 1")), +// Status: sqlserverflex.GetInstanceResponseGetStatusAttributeType(utils.Ptr("status")), +// IsDeletable: utils.Ptr(true), +// }, +// &storageModel{}, +// &encryptionModel{}, +// &networkModel{ +// ACL: types.ListNull(basetypes.StringType{}), +// }, +// testRegion, +// Model{ +// Id: types.StringValue("pid,region,iid"), +// InstanceId: types.StringValue("iid"), +// ProjectId: types.StringValue("pid"), +// Name: types.StringNull(), +// BackupSchedule: types.StringNull(), +// Replicas: types.Int64Value(1), +// Storage: types.ObjectValueMust(storageTypes, map[string]attr.Value{ +// "class": types.StringNull(), +// "size": types.Int64Null(), +// }), +// Encryption: types.ObjectValueMust(encryptionTypes, map[string]attr.Value{ +// "keyring_id": types.StringNull(), +// "key_id": types.StringNull(), +// "key_version": types.StringNull(), +// "service_account": types.StringNull(), +// }), +// Network: types.ObjectValueMust(networkTypes, map[string]attr.Value{ +// "acl": types.ListNull(types.StringType), +// "access_scope": types.StringNull(), +// "instance_address": types.StringNull(), +// "router_address": types.StringNull(), +// }), +// IsDeletable: types.BoolValue(true), +// Edition: types.StringValue("edition 1"), +// Status: types.StringValue("status"), +// RetentionDays: types.Int64Value(1), +// Version: types.StringValue("v1"), +// Region: types.StringValue(testRegion), +// }, +// true, +// }, +// { +// "simple_values", +// Model{ +// InstanceId: types.StringValue("iid"), +// ProjectId: types.StringValue("pid"), +// }, +// &sqlserverflex.GetInstanceResponse{ +// BackupSchedule: utils.Ptr("schedule"), +// FlavorId: utils.Ptr("flavor_id"), +// Id: utils.Ptr("iid"), +// Name: utils.Ptr("name"), +// Replicas: sqlserverflex.GetInstanceResponseGetReplicasAttributeType(utils.Ptr(int32(56))), +// Status: sqlserverflex.GetInstanceResponseGetStatusAttributeType(utils.Ptr("status")), +// Storage: &sqlserverflex.Storage{ +// Class: utils.Ptr("class"), +// Size: utils.Ptr(int64(78)), +// }, +// Edition: sqlserverflex.GetInstanceResponseGetEditionAttributeType(utils.Ptr("edition")), +// RetentionDays: utils.Ptr(int64(1)), +// Version: sqlserverflex.GetInstanceResponseGetVersionAttributeType(utils.Ptr("version")), +// IsDeletable: utils.Ptr(true), +// Encryption: nil, +// Network: &sqlserverflex.InstanceNetwork{ +// AccessScope: nil, +// Acl: &[]string{ +// "ip1", +// "ip2", +// "", +// }, +// InstanceAddress: nil, +// RouterAddress: nil, +// }, +// }, +// &storageModel{}, +// &encryptionModel{}, +// &networkModel{ +// ACL: types.ListValueMust(basetypes.StringType{}, []attr.Value{ +// types.StringValue("ip1"), +// types.StringValue("ip2"), +// types.StringValue(""), +// }), +// }, +// testRegion, +// Model{ +// Id: types.StringValue("pid,region,iid"), +// InstanceId: types.StringValue("iid"), +// ProjectId: types.StringValue("pid"), +// Name: types.StringValue("name"), +// BackupSchedule: types.StringValue("schedule"), +// Replicas: types.Int64Value(56), +// Storage: types.ObjectValueMust(storageTypes, map[string]attr.Value{ +// "class": types.StringValue("class"), +// "size": types.Int64Value(78), +// }), +// Network: types.ObjectValueMust(networkTypes, map[string]attr.Value{ +// "acl": types.ListValueMust(types.StringType, []attr.Value{ +// types.StringValue("ip1"), +// types.StringValue("ip2"), +// types.StringValue(""), +// }), +// "access_scope": types.StringNull(), +// "instance_address": types.StringNull(), +// "router_address": types.StringNull(), +// }), +// Edition: types.StringValue("edition"), +// RetentionDays: types.Int64Value(1), +// Version: types.StringValue("version"), +// Region: types.StringValue(testRegion), +// IsDeletable: types.BoolValue(true), +// Encryption: types.ObjectValueMust(encryptionTypes, map[string]attr.Value{ +// "keyring_id": types.StringNull(), +// "key_id": types.StringNull(), +// "key_version": types.StringNull(), +// "service_account": types.StringNull(), +// }), +// Status: types.StringValue("status"), +// }, +// true, +// }, +// // { +// // "simple_values_no_flavor_and_storage", +// // Model{ +// // InstanceId: types.StringValue("iid"), +// // ProjectId: types.StringValue("pid"), +// // }, +// // &sqlserverflex.GetInstanceResponse{ +// // Acl: &[]string{ +// // "ip1", +// // "ip2", +// // "", +// // }, +// // BackupSchedule: utils.Ptr("schedule"), +// // FlavorId: nil, +// // Id: utils.Ptr("iid"), +// // Name: utils.Ptr("name"), +// // Replicas: sqlserverflex.GetInstanceResponseGetReplicasAttributeType(utils.Ptr(int32(56))), +// // Status: sqlserverflex.GetInstanceResponseGetStatusAttributeType(utils.Ptr("status")), +// // Storage: nil, +// // Edition: sqlserverflex.GetInstanceResponseGetEditionAttributeType(utils.Ptr("edition")), +// // RetentionDays: utils.Ptr(int64(1)), +// // Version: sqlserverflex.GetInstanceResponseGetVersionAttributeType(utils.Ptr("version")), +// // }, +// // &flavorModel{ +// // CPU: types.Int64Value(12), +// // RAM: types.Int64Value(34), +// // }, +// // &storageModel{ +// // Class: types.StringValue("class"), +// // Size: types.Int64Value(78), +// // }, +// // &optionsModel{ +// // Edition: types.StringValue("edition"), +// // RetentionDays: types.Int64Value(1), +// // }, +// // testRegion, +// // Model{ +// // Id: types.StringValue("pid,region,iid"), +// // InstanceId: types.StringValue("iid"), +// // ProjectId: types.StringValue("pid"), +// // Name: types.StringValue("name"), +// // ACL: types.ListValueMust(types.StringType, []attr.Value{ +// // types.StringValue("ip1"), +// // types.StringValue("ip2"), +// // types.StringValue(""), +// // }), +// // BackupSchedule: types.StringValue("schedule"), +// // Flavor: types.ObjectValueMust(flavorTypes, map[string]attr.Value{ +// // "id": types.StringNull(), +// // "description": types.StringNull(), +// // "cpu": types.Int64Value(12), +// // "ram": types.Int64Value(34), +// // }), +// // Replicas: types.Int64Value(56), +// // Storage: types.ObjectValueMust(storageTypes, map[string]attr.Value{ +// // "class": types.StringValue("class"), +// // "size": types.Int64Value(78), +// // }), +// // Options: types.ObjectValueMust(optionsTypes, map[string]attr.Value{ +// // "edition": types.StringValue("edition"), +// // "retention_days": types.Int64Value(1), +// // }), +// // Version: types.StringValue("version"), +// // Region: types.StringValue(testRegion), +// // }, +// // true, +// // }, +// // { +// // "acls_unordered", +// // Model{ +// // InstanceId: types.StringValue("iid"), +// // ProjectId: types.StringValue("pid"), +// // ACL: types.ListValueMust(types.StringType, []attr.Value{ +// // types.StringValue("ip2"), +// // types.StringValue(""), +// // types.StringValue("ip1"), +// // }), +// // }, +// // &sqlserverflex.GetInstanceResponse{ +// // Acl: &[]string{ +// // "", +// // "ip1", +// // "ip2", +// // }, +// // BackupSchedule: utils.Ptr("schedule"), +// // FlavorId: nil, +// // Id: utils.Ptr("iid"), +// // Name: utils.Ptr("name"), +// // Replicas: sqlserverflex.GetInstanceResponseGetReplicasAttributeType(utils.Ptr(int32(56))), +// // Status: sqlserverflex.GetInstanceResponseGetStatusAttributeType(utils.Ptr("status")), +// // Storage: nil, +// // //Options: &map[string]string{ +// // // "edition": "edition", +// // // "retentionDays": "1", +// // //}, +// // Version: sqlserverflex.GetInstanceResponseGetVersionAttributeType(utils.Ptr("version")), +// // }, +// // &flavorModel{ +// // CPU: types.Int64Value(12), +// // RAM: types.Int64Value(34), +// // }, +// // &storageModel{ +// // Class: types.StringValue("class"), +// // Size: types.Int64Value(78), +// // }, +// // &optionsModel{}, +// // testRegion, +// // Model{ +// // Id: types.StringValue("pid,region,iid"), +// // InstanceId: types.StringValue("iid"), +// // ProjectId: types.StringValue("pid"), +// // Name: types.StringValue("name"), +// // ACL: types.ListValueMust(types.StringType, []attr.Value{ +// // types.StringValue("ip2"), +// // types.StringValue(""), +// // types.StringValue("ip1"), +// // }), +// // BackupSchedule: types.StringValue("schedule"), +// // Flavor: types.ObjectValueMust(flavorTypes, map[string]attr.Value{ +// // "id": types.StringNull(), +// // "description": types.StringNull(), +// // "cpu": types.Int64Value(12), +// // "ram": types.Int64Value(34), +// // }), +// // Replicas: types.Int64Value(56), +// // Storage: types.ObjectValueMust(storageTypes, map[string]attr.Value{ +// // "class": types.StringValue("class"), +// // "size": types.Int64Value(78), +// // }), +// // Options: types.ObjectValueMust(optionsTypes, map[string]attr.Value{ +// // "edition": types.StringValue("edition"), +// // "retention_days": types.Int64Value(1), +// // }), +// // Version: types.StringValue("version"), +// // Region: types.StringValue(testRegion), +// // }, +// // true, +// // }, +// // { +// // "nil_response", +// // Model{ +// // InstanceId: types.StringValue("iid"), +// // ProjectId: types.StringValue("pid"), +// // }, +// // nil, +// // &flavorModel{}, +// // &storageModel{}, +// // &optionsModel{}, +// // testRegion, +// // Model{}, +// // false, +// // }, +// // { +// // "no_resource_id", +// // Model{ +// // InstanceId: types.StringValue("iid"), +// // ProjectId: types.StringValue("pid"), +// // }, +// // &sqlserverflex.GetInstanceResponse{}, +// // &flavorModel{}, +// // &storageModel{}, +// // &optionsModel{}, +// // testRegion, +// // Model{}, +// // false, +// // }, +// } +// for _, tt := range tests { +// t.Run(tt.description, func(t *testing.T) { +// err := mapFields(context.Background(), tt.input, &tt.state, tt.storage, tt.encryption, tt.network, tt.region) +// if !tt.isValid && err == nil { +// t.Fatalf("Should have failed") +// } +// if tt.isValid && err != nil { +// t.Fatalf("Should not have failed: %v", err) +// } +// if tt.isValid { +// diff := cmp.Diff(tt.state, tt.expected) +// if diff != "" { +// t.Fatalf("Data does not match: %s", diff) +// } +// } +// }) +// } +//} // func TestToCreatePayload(t *testing.T) { // tests := []struct { diff --git a/stackit/internal/services/sqlserverflexalpha/utils/util.go b/stackit/internal/services/sqlserverflexalpha/utils/util.go index 4180955b..db031162 100644 --- a/stackit/internal/services/sqlserverflexalpha/utils/util.go +++ b/stackit/internal/services/sqlserverflexalpha/utils/util.go @@ -1,5 +1,3 @@ -// Copyright (c) STACKIT - package utils import ( diff --git a/stackit/internal/services/sqlserverflexalpha/utils/util_test.go b/stackit/internal/services/sqlserverflexalpha/utils/util_test.go index 7818408d..7afd6b1d 100644 --- a/stackit/internal/services/sqlserverflexalpha/utils/util_test.go +++ b/stackit/internal/services/sqlserverflexalpha/utils/util_test.go @@ -1,5 +1,3 @@ -// Copyright (c) STACKIT - package utils import ( diff --git a/stackit/internal/services/sqlserverflexalpha/version/datasource.go b/stackit/internal/services/sqlserverflexalpha/version/datasource.go deleted file mode 100644 index 707ba2f9..00000000 --- a/stackit/internal/services/sqlserverflexalpha/version/datasource.go +++ /dev/null @@ -1,71 +0,0 @@ -package sqlserverflexalpha - -import ( - "context" - - "github.com/hashicorp/terraform-plugin-framework/datasource" - "github.com/hashicorp/terraform-plugin-log/tflog" - "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha" - "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" - "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" - sqlserverflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/utils" - - sqlserverflexalphaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/version/datasources_gen" -) - -var ( - _ datasource.DataSource = (*versionDataSource)(nil) - _ datasource.DataSourceWithConfigure = (*versionDataSource)(nil) -) - -func NewVersionDataSource() datasource.DataSource { - return &versionDataSource{} -} - -type versionDataSource struct { - client *sqlserverflexalpha.APIClient - providerData core.ProviderData -} - -func (d *versionDataSource) Metadata(_ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) { - resp.TypeName = req.ProviderTypeName + "_sqlserverflexalpha_version" -} - -func (d *versionDataSource) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) { - resp.Schema = sqlserverflexalphaGen.VersionDataSourceSchema(ctx) -} - -// Configure adds the provider configured client to the data source. -func (d *versionDataSource) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) { - var ok bool - d.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics) - if !ok { - return - } - - apiClient := sqlserverflexUtils.ConfigureClient(ctx, &d.providerData, &resp.Diagnostics) - if resp.Diagnostics.HasError() { - return - } - d.client = apiClient - tflog.Info(ctx, "SQL SERVER Flex version client configured") -} - -func (d *versionDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { - var data sqlserverflexalphaGen.VersionModel - - // Read Terraform configuration data into the model - resp.Diagnostics.Append(req.Config.Get(ctx, &data)...) - - if resp.Diagnostics.HasError() { - return - } - - // Todo: Read API call logic - - // Example data value setting - // data.Id = types.StringValue("example-id") - - // Save data into Terraform state - resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) -} diff --git a/stackit/internal/services/sqlserverflexalpha/version/datasources_gen/version_data_source_gen.go b/stackit/internal/services/sqlserverflexalpha/version/datasources_gen/version_data_source_gen.go deleted file mode 100644 index cb9008f1..00000000 --- a/stackit/internal/services/sqlserverflexalpha/version/datasources_gen/version_data_source_gen.go +++ /dev/null @@ -1,569 +0,0 @@ -// Code generated by terraform-plugin-framework-generator DO NOT EDIT. - -package sqlserverflexalpha - -import ( - "context" - "fmt" - "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator" - "github.com/hashicorp/terraform-plugin-framework/attr" - "github.com/hashicorp/terraform-plugin-framework/diag" - "github.com/hashicorp/terraform-plugin-framework/schema/validator" - "github.com/hashicorp/terraform-plugin-framework/types" - "github.com/hashicorp/terraform-plugin-framework/types/basetypes" - "github.com/hashicorp/terraform-plugin-go/tftypes" - "strings" - - "github.com/hashicorp/terraform-plugin-framework/datasource/schema" -) - -func VersionDataSourceSchema(ctx context.Context) schema.Schema { - return schema.Schema{ - Attributes: map[string]schema.Attribute{ - "project_id": schema.StringAttribute{ - Required: true, - Description: "The STACKIT project ID.", - MarkdownDescription: "The STACKIT project ID.", - }, - "region": schema.StringAttribute{ - Required: true, - Description: "The region which should be addressed", - MarkdownDescription: "The region which should be addressed", - Validators: []validator.String{ - stringvalidator.OneOf( - "eu01", - ), - }, - }, - "versions": schema.ListNestedAttribute{ - NestedObject: schema.NestedAttributeObject{ - Attributes: map[string]schema.Attribute{ - "beta": schema.BoolAttribute{ - Computed: true, - Description: "Flag if the version is a beta version. If set the version may contain bugs and is not fully tested.", - MarkdownDescription: "Flag if the version is a beta version. If set the version may contain bugs and is not fully tested.", - }, - "deprecated": schema.StringAttribute{ - Computed: true, - Description: "Timestamp in RFC3339 format which says when the version will no longer be supported by STACKIT.", - MarkdownDescription: "Timestamp in RFC3339 format which says when the version will no longer be supported by STACKIT.", - }, - "recommend": schema.BoolAttribute{ - Computed: true, - Description: "Flag if the version is recommend by the STACKIT Team.", - MarkdownDescription: "Flag if the version is recommend by the STACKIT Team.", - }, - "version": schema.StringAttribute{ - Computed: true, - Description: "The sqlserver version used for the instance.", - MarkdownDescription: "The sqlserver version used for the instance.", - }, - }, - CustomType: VersionsType{ - ObjectType: types.ObjectType{ - AttrTypes: VersionsValue{}.AttributeTypes(ctx), - }, - }, - }, - Computed: true, - Description: "A list containing available sqlserver versions.", - MarkdownDescription: "A list containing available sqlserver versions.", - }, - }, - } -} - -type VersionModel struct { - ProjectId types.String `tfsdk:"project_id"` - Region types.String `tfsdk:"region"` - Versions types.List `tfsdk:"versions"` -} - -var _ basetypes.ObjectTypable = VersionsType{} - -type VersionsType struct { - basetypes.ObjectType -} - -func (t VersionsType) Equal(o attr.Type) bool { - other, ok := o.(VersionsType) - - if !ok { - return false - } - - return t.ObjectType.Equal(other.ObjectType) -} - -func (t VersionsType) String() string { - return "VersionsType" -} - -func (t VersionsType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) { - var diags diag.Diagnostics - - attributes := in.Attributes() - - betaAttribute, ok := attributes["beta"] - - if !ok { - diags.AddError( - "Attribute Missing", - `beta is missing from object`) - - return nil, diags - } - - betaVal, ok := betaAttribute.(basetypes.BoolValue) - - if !ok { - diags.AddError( - "Attribute Wrong Type", - fmt.Sprintf(`beta expected to be basetypes.BoolValue, was: %T`, betaAttribute)) - } - - deprecatedAttribute, ok := attributes["deprecated"] - - if !ok { - diags.AddError( - "Attribute Missing", - `deprecated is missing from object`) - - return nil, diags - } - - deprecatedVal, ok := deprecatedAttribute.(basetypes.StringValue) - - if !ok { - diags.AddError( - "Attribute Wrong Type", - fmt.Sprintf(`deprecated expected to be basetypes.StringValue, was: %T`, deprecatedAttribute)) - } - - recommendAttribute, ok := attributes["recommend"] - - if !ok { - diags.AddError( - "Attribute Missing", - `recommend is missing from object`) - - return nil, diags - } - - recommendVal, ok := recommendAttribute.(basetypes.BoolValue) - - if !ok { - diags.AddError( - "Attribute Wrong Type", - fmt.Sprintf(`recommend expected to be basetypes.BoolValue, was: %T`, recommendAttribute)) - } - - versionAttribute, ok := attributes["version"] - - if !ok { - diags.AddError( - "Attribute Missing", - `version is missing from object`) - - return nil, diags - } - - versionVal, ok := versionAttribute.(basetypes.StringValue) - - if !ok { - diags.AddError( - "Attribute Wrong Type", - fmt.Sprintf(`version expected to be basetypes.StringValue, was: %T`, versionAttribute)) - } - - if diags.HasError() { - return nil, diags - } - - return VersionsValue{ - Beta: betaVal, - Deprecated: deprecatedVal, - Recommend: recommendVal, - Version: versionVal, - state: attr.ValueStateKnown, - }, diags -} - -func NewVersionsValueNull() VersionsValue { - return VersionsValue{ - state: attr.ValueStateNull, - } -} - -func NewVersionsValueUnknown() VersionsValue { - return VersionsValue{ - state: attr.ValueStateUnknown, - } -} - -func NewVersionsValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (VersionsValue, diag.Diagnostics) { - var diags diag.Diagnostics - - // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521 - ctx := context.Background() - - for name, attributeType := range attributeTypes { - attribute, ok := attributes[name] - - if !ok { - diags.AddError( - "Missing VersionsValue Attribute Value", - "While creating a VersionsValue value, a missing attribute value was detected. "+ - "A VersionsValue must contain values for all attributes, even if null or unknown. "+ - "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ - fmt.Sprintf("VersionsValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()), - ) - - continue - } - - if !attributeType.Equal(attribute.Type(ctx)) { - diags.AddError( - "Invalid VersionsValue Attribute Type", - "While creating a VersionsValue value, an invalid attribute value was detected. "+ - "A VersionsValue must use a matching attribute type for the value. "+ - "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ - fmt.Sprintf("VersionsValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+ - fmt.Sprintf("VersionsValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)), - ) - } - } - - for name := range attributes { - _, ok := attributeTypes[name] - - if !ok { - diags.AddError( - "Extra VersionsValue Attribute Value", - "While creating a VersionsValue value, an extra attribute value was detected. "+ - "A VersionsValue must not contain values beyond the expected attribute types. "+ - "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ - fmt.Sprintf("Extra VersionsValue Attribute Name: %s", name), - ) - } - } - - if diags.HasError() { - return NewVersionsValueUnknown(), diags - } - - betaAttribute, ok := attributes["beta"] - - if !ok { - diags.AddError( - "Attribute Missing", - `beta is missing from object`) - - return NewVersionsValueUnknown(), diags - } - - betaVal, ok := betaAttribute.(basetypes.BoolValue) - - if !ok { - diags.AddError( - "Attribute Wrong Type", - fmt.Sprintf(`beta expected to be basetypes.BoolValue, was: %T`, betaAttribute)) - } - - deprecatedAttribute, ok := attributes["deprecated"] - - if !ok { - diags.AddError( - "Attribute Missing", - `deprecated is missing from object`) - - return NewVersionsValueUnknown(), diags - } - - deprecatedVal, ok := deprecatedAttribute.(basetypes.StringValue) - - if !ok { - diags.AddError( - "Attribute Wrong Type", - fmt.Sprintf(`deprecated expected to be basetypes.StringValue, was: %T`, deprecatedAttribute)) - } - - recommendAttribute, ok := attributes["recommend"] - - if !ok { - diags.AddError( - "Attribute Missing", - `recommend is missing from object`) - - return NewVersionsValueUnknown(), diags - } - - recommendVal, ok := recommendAttribute.(basetypes.BoolValue) - - if !ok { - diags.AddError( - "Attribute Wrong Type", - fmt.Sprintf(`recommend expected to be basetypes.BoolValue, was: %T`, recommendAttribute)) - } - - versionAttribute, ok := attributes["version"] - - if !ok { - diags.AddError( - "Attribute Missing", - `version is missing from object`) - - return NewVersionsValueUnknown(), diags - } - - versionVal, ok := versionAttribute.(basetypes.StringValue) - - if !ok { - diags.AddError( - "Attribute Wrong Type", - fmt.Sprintf(`version expected to be basetypes.StringValue, was: %T`, versionAttribute)) - } - - if diags.HasError() { - return NewVersionsValueUnknown(), diags - } - - return VersionsValue{ - Beta: betaVal, - Deprecated: deprecatedVal, - Recommend: recommendVal, - Version: versionVal, - state: attr.ValueStateKnown, - }, diags -} - -func NewVersionsValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) VersionsValue { - object, diags := NewVersionsValue(attributeTypes, attributes) - - if diags.HasError() { - // This could potentially be added to the diag package. - diagsStrings := make([]string, 0, len(diags)) - - for _, diagnostic := range diags { - diagsStrings = append(diagsStrings, fmt.Sprintf( - "%s | %s | %s", - diagnostic.Severity(), - diagnostic.Summary(), - diagnostic.Detail())) - } - - panic("NewVersionsValueMust received error(s): " + strings.Join(diagsStrings, "\n")) - } - - return object -} - -func (t VersionsType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) { - if in.Type() == nil { - return NewVersionsValueNull(), nil - } - - if !in.Type().Equal(t.TerraformType(ctx)) { - return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type()) - } - - if !in.IsKnown() { - return NewVersionsValueUnknown(), nil - } - - if in.IsNull() { - return NewVersionsValueNull(), nil - } - - attributes := map[string]attr.Value{} - - val := map[string]tftypes.Value{} - - err := in.As(&val) - - if err != nil { - return nil, err - } - - for k, v := range val { - a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v) - - if err != nil { - return nil, err - } - - attributes[k] = a - } - - return NewVersionsValueMust(VersionsValue{}.AttributeTypes(ctx), attributes), nil -} - -func (t VersionsType) ValueType(ctx context.Context) attr.Value { - return VersionsValue{} -} - -var _ basetypes.ObjectValuable = VersionsValue{} - -type VersionsValue struct { - Beta basetypes.BoolValue `tfsdk:"beta"` - Deprecated basetypes.StringValue `tfsdk:"deprecated"` - Recommend basetypes.BoolValue `tfsdk:"recommend"` - Version basetypes.StringValue `tfsdk:"version"` - state attr.ValueState -} - -func (v VersionsValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) { - attrTypes := make(map[string]tftypes.Type, 4) - - var val tftypes.Value - var err error - - attrTypes["beta"] = basetypes.BoolType{}.TerraformType(ctx) - attrTypes["deprecated"] = basetypes.StringType{}.TerraformType(ctx) - attrTypes["recommend"] = basetypes.BoolType{}.TerraformType(ctx) - attrTypes["version"] = basetypes.StringType{}.TerraformType(ctx) - - objectType := tftypes.Object{AttributeTypes: attrTypes} - - switch v.state { - case attr.ValueStateKnown: - vals := make(map[string]tftypes.Value, 4) - - val, err = v.Beta.ToTerraformValue(ctx) - - if err != nil { - return tftypes.NewValue(objectType, tftypes.UnknownValue), err - } - - vals["beta"] = val - - val, err = v.Deprecated.ToTerraformValue(ctx) - - if err != nil { - return tftypes.NewValue(objectType, tftypes.UnknownValue), err - } - - vals["deprecated"] = val - - val, err = v.Recommend.ToTerraformValue(ctx) - - if err != nil { - return tftypes.NewValue(objectType, tftypes.UnknownValue), err - } - - vals["recommend"] = val - - val, err = v.Version.ToTerraformValue(ctx) - - if err != nil { - return tftypes.NewValue(objectType, tftypes.UnknownValue), err - } - - vals["version"] = val - - if err := tftypes.ValidateValue(objectType, vals); err != nil { - return tftypes.NewValue(objectType, tftypes.UnknownValue), err - } - - return tftypes.NewValue(objectType, vals), nil - case attr.ValueStateNull: - return tftypes.NewValue(objectType, nil), nil - case attr.ValueStateUnknown: - return tftypes.NewValue(objectType, tftypes.UnknownValue), nil - default: - panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state)) - } -} - -func (v VersionsValue) IsNull() bool { - return v.state == attr.ValueStateNull -} - -func (v VersionsValue) IsUnknown() bool { - return v.state == attr.ValueStateUnknown -} - -func (v VersionsValue) String() string { - return "VersionsValue" -} - -func (v VersionsValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) { - var diags diag.Diagnostics - - attributeTypes := map[string]attr.Type{ - "beta": basetypes.BoolType{}, - "deprecated": basetypes.StringType{}, - "recommend": basetypes.BoolType{}, - "version": basetypes.StringType{}, - } - - if v.IsNull() { - return types.ObjectNull(attributeTypes), diags - } - - if v.IsUnknown() { - return types.ObjectUnknown(attributeTypes), diags - } - - objVal, diags := types.ObjectValue( - attributeTypes, - map[string]attr.Value{ - "beta": v.Beta, - "deprecated": v.Deprecated, - "recommend": v.Recommend, - "version": v.Version, - }) - - return objVal, diags -} - -func (v VersionsValue) Equal(o attr.Value) bool { - other, ok := o.(VersionsValue) - - if !ok { - return false - } - - if v.state != other.state { - return false - } - - if v.state != attr.ValueStateKnown { - return true - } - - if !v.Beta.Equal(other.Beta) { - return false - } - - if !v.Deprecated.Equal(other.Deprecated) { - return false - } - - if !v.Recommend.Equal(other.Recommend) { - return false - } - - if !v.Version.Equal(other.Version) { - return false - } - - return true -} - -func (v VersionsValue) Type(ctx context.Context) attr.Type { - return VersionsType{ - basetypes.ObjectType{ - AttrTypes: v.AttributeTypes(ctx), - }, - } -} - -func (v VersionsValue) AttributeTypes(ctx context.Context) map[string]attr.Type { - return map[string]attr.Type{ - "beta": basetypes.BoolType{}, - "deprecated": basetypes.StringType{}, - "recommend": basetypes.BoolType{}, - "version": basetypes.StringType{}, - } -} diff --git a/stackit/internal/services/sqlserverflexbeta/database/datasource.go b/stackit/internal/services/sqlserverflexbeta/database/datasource.go new file mode 100644 index 00000000..70fbaca4 --- /dev/null +++ b/stackit/internal/services/sqlserverflexbeta/database/datasource.go @@ -0,0 +1,150 @@ +package sqlserverflexbeta + +import ( + "context" + "fmt" + "net/http" + + "github.com/hashicorp/terraform-plugin-framework/datasource" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/hashicorp/terraform-plugin-log/tflog" + "github.com/stackitcloud/stackit-sdk-go/core/config" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" + + sqlserverflexbetaPkg "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta" + sqlserverflexbetaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/database/datasources_gen" +) + +var _ datasource.DataSource = (*databaseDataSource)(nil) + +const errorPrefix = "[Sqlserverflexbeta - Database]" + +func NewDatabaseDataSource() datasource.DataSource { + return &databaseDataSource{} +} + +type databaseDataSource struct { + client *sqlserverflexbetaPkg.APIClient + providerData core.ProviderData +} + +func (d *databaseDataSource) Metadata(_ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) { + resp.TypeName = req.ProviderTypeName + "_sqlserverflexbeta_database" +} + +func (d *databaseDataSource) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) { + resp.Schema = sqlserverflexbetaGen.DatabaseDataSourceSchema(ctx) +} + +// Configure adds the provider configured client to the data source. +func (d *databaseDataSource) Configure( + ctx context.Context, + req datasource.ConfigureRequest, + resp *datasource.ConfigureResponse, +) { + var ok bool + d.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics) + if !ok { + return + } + + apiClientConfigOptions := []config.ConfigurationOption{ + config.WithCustomAuth(d.providerData.RoundTripper), + utils.UserAgentConfigOption(d.providerData.Version), + } + if d.providerData.SQLServerFlexCustomEndpoint != "" { + apiClientConfigOptions = append( + apiClientConfigOptions, + config.WithEndpoint(d.providerData.SQLServerFlexCustomEndpoint), + ) + } else { + apiClientConfigOptions = append( + apiClientConfigOptions, + config.WithRegion(d.providerData.GetRegion()), + ) + } + apiClient, err := sqlserverflexbetaPkg.NewAPIClient(apiClientConfigOptions...) + if err != nil { + resp.Diagnostics.AddError( + "Error configuring API client", + fmt.Sprintf( + "Configuring client: %v. This is an error related to the provider configuration, not to the resource configuration", + err, + ), + ) + return + } + d.client = apiClient + tflog.Info(ctx, fmt.Sprintf("%s client configured", errorPrefix)) +} + +func (d *databaseDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { + var data sqlserverflexbetaGen.DatabaseModel + readErr := "Read DB error" + + // Read Terraform configuration data into the model + resp.Diagnostics.Append(req.Config.Get(ctx, &data)...) + + if resp.Diagnostics.HasError() { + return + } + + ctx = core.InitProviderContext(ctx) + + projectId := data.ProjectId.ValueString() + region := d.providerData.GetRegionWithOverride(data.Region) + instanceId := data.InstanceId.ValueString() + + ctx = tflog.SetField(ctx, "project_id", projectId) + ctx = tflog.SetField(ctx, "region", region) + ctx = tflog.SetField(ctx, "instance_id", instanceId) + + databaseName := data.DatabaseName.ValueString() + + databaseResp, err := d.client.GetDatabaseRequest(ctx, projectId, region, instanceId, databaseName).Execute() + if err != nil { + utils.LogError( + ctx, + &resp.Diagnostics, + err, + "Reading database", + fmt.Sprintf("database with %q does not exist in project %q.", databaseName, projectId), + map[int]string{ + http.StatusForbidden: fmt.Sprintf("Project with %q not found or forbidden access", projectId), + }, + ) + resp.State.RemoveResource(ctx) + return + } + + ctx = core.LogResponse(ctx) + + dbId, ok := databaseResp.GetIdOk() + if !ok { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + readErr, + "Database creation waiting: returned id is nil", + ) + return + } + data.Id = types.Int64Value(dbId) + + owner, ok := databaseResp.GetOwnerOk() + if !ok { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + readErr, + "Database creation waiting: returned owner is nil", + ) + return + } + data.Owner = types.StringValue(owner) + + // Save data into Terraform state + resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) +} diff --git a/stackit/internal/services/sqlserverflexbeta/database/datasources_gen/database_data_source_gen.go b/stackit/internal/services/sqlserverflexbeta/database/datasources_gen/database_data_source_gen.go new file mode 100644 index 00000000..cfdc1a86 --- /dev/null +++ b/stackit/internal/services/sqlserverflexbeta/database/datasources_gen/database_data_source_gen.go @@ -0,0 +1,82 @@ +// Code generated by terraform-plugin-framework-generator DO NOT EDIT. + +package sqlserverflexbeta + +import ( + "context" + "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator" + "github.com/hashicorp/terraform-plugin-framework/schema/validator" + "github.com/hashicorp/terraform-plugin-framework/types" + + "github.com/hashicorp/terraform-plugin-framework/datasource/schema" +) + +func DatabaseDataSourceSchema(ctx context.Context) schema.Schema { + return schema.Schema{ + Attributes: map[string]schema.Attribute{ + "collation_name": schema.StringAttribute{ + Computed: true, + Description: "The collation of the database. This database collation should match the *collation_name* of one of the collations given by the **Get database collation list** endpoint.", + MarkdownDescription: "The collation of the database. This database collation should match the *collation_name* of one of the collations given by the **Get database collation list** endpoint.", + }, + "compatibility_level": schema.Int64Attribute{ + Computed: true, + Description: "CompatibilityLevel of the Database.", + MarkdownDescription: "CompatibilityLevel of the Database.", + }, + "database_name": schema.StringAttribute{ + Required: true, + Description: "The name of the database.", + MarkdownDescription: "The name of the database.", + }, + "id": schema.Int64Attribute{ + Computed: true, + Description: "The id of the database.", + MarkdownDescription: "The id of the database.", + }, + "instance_id": schema.StringAttribute{ + Required: true, + Description: "The ID of the instance.", + MarkdownDescription: "The ID of the instance.", + }, + "name": schema.StringAttribute{ + Computed: true, + Description: "The name of the database.", + MarkdownDescription: "The name of the database.", + }, + "owner": schema.StringAttribute{ + Computed: true, + Description: "The owner of the database.", + MarkdownDescription: "The owner of the database.", + }, + "project_id": schema.StringAttribute{ + Required: true, + Description: "The STACKIT project ID.", + MarkdownDescription: "The STACKIT project ID.", + }, + "region": schema.StringAttribute{ + Optional: true, + Computed: true, + Description: "The region which should be addressed", + MarkdownDescription: "The region which should be addressed", + Validators: []validator.String{ + stringvalidator.OneOf( + "eu01", + ), + }, + }, + }, + } +} + +type DatabaseModel struct { + CollationName types.String `tfsdk:"collation_name"` + CompatibilityLevel types.Int64 `tfsdk:"compatibility_level"` + DatabaseName types.String `tfsdk:"database_name"` + Id types.Int64 `tfsdk:"id"` + InstanceId types.String `tfsdk:"instance_id"` + Name types.String `tfsdk:"name"` + Owner types.String `tfsdk:"owner"` + ProjectId types.String `tfsdk:"project_id"` + Region types.String `tfsdk:"region"` +} diff --git a/stackit/internal/services/sqlserverflexbeta/database/datasources_gen/databases_data_source_gen.go b/stackit/internal/services/sqlserverflexbeta/database/datasources_gen/databases_data_source_gen.go new file mode 100644 index 00000000..71ec8fb4 --- /dev/null +++ b/stackit/internal/services/sqlserverflexbeta/database/datasources_gen/databases_data_source_gen.go @@ -0,0 +1,1180 @@ +// Code generated by terraform-plugin-framework-generator DO NOT EDIT. + +package sqlserverflexbeta + +import ( + "context" + "fmt" + "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator" + "github.com/hashicorp/terraform-plugin-framework/attr" + "github.com/hashicorp/terraform-plugin-framework/diag" + "github.com/hashicorp/terraform-plugin-framework/schema/validator" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/hashicorp/terraform-plugin-framework/types/basetypes" + "github.com/hashicorp/terraform-plugin-go/tftypes" + "strings" + + "github.com/hashicorp/terraform-plugin-framework/datasource/schema" +) + +func DatabasesDataSourceSchema(ctx context.Context) schema.Schema { + return schema.Schema{ + Attributes: map[string]schema.Attribute{ + "databases": schema.ListNestedAttribute{ + NestedObject: schema.NestedAttributeObject{ + Attributes: map[string]schema.Attribute{ + "created": schema.StringAttribute{ + Computed: true, + Description: "The date when the database was created in RFC3339 format.", + MarkdownDescription: "The date when the database was created in RFC3339 format.", + }, + "id": schema.Int64Attribute{ + Computed: true, + Description: "The id of the database.", + MarkdownDescription: "The id of the database.", + }, + "name": schema.StringAttribute{ + Computed: true, + Description: "The name of the database.", + MarkdownDescription: "The name of the database.", + }, + "owner": schema.StringAttribute{ + Computed: true, + Description: "The owner of the database.", + MarkdownDescription: "The owner of the database.", + }, + }, + CustomType: DatabasesType{ + ObjectType: types.ObjectType{ + AttrTypes: DatabasesValue{}.AttributeTypes(ctx), + }, + }, + }, + Computed: true, + Description: "A list containing all databases for the instance.", + MarkdownDescription: "A list containing all databases for the instance.", + }, + "instance_id": schema.StringAttribute{ + Required: true, + Description: "The ID of the instance.", + MarkdownDescription: "The ID of the instance.", + }, + "page": schema.Int64Attribute{ + Optional: true, + Computed: true, + Description: "Number of the page of items list to be returned.", + MarkdownDescription: "Number of the page of items list to be returned.", + }, + "pagination": schema.SingleNestedAttribute{ + Attributes: map[string]schema.Attribute{ + "page": schema.Int64Attribute{ + Computed: true, + }, + "size": schema.Int64Attribute{ + Computed: true, + }, + "sort": schema.StringAttribute{ + Computed: true, + }, + "total_pages": schema.Int64Attribute{ + Computed: true, + }, + "total_rows": schema.Int64Attribute{ + Computed: true, + }, + }, + CustomType: PaginationType{ + ObjectType: types.ObjectType{ + AttrTypes: PaginationValue{}.AttributeTypes(ctx), + }, + }, + Computed: true, + }, + "project_id": schema.StringAttribute{ + Required: true, + Description: "The STACKIT project ID.", + MarkdownDescription: "The STACKIT project ID.", + }, + "region": schema.StringAttribute{ + Required: true, + Description: "The region which should be addressed", + MarkdownDescription: "The region which should be addressed", + Validators: []validator.String{ + stringvalidator.OneOf( + "eu01", + ), + }, + }, + "size": schema.Int64Attribute{ + Optional: true, + Computed: true, + Description: "Number of items to be returned on each page.", + MarkdownDescription: "Number of items to be returned on each page.", + }, + "sort": schema.StringAttribute{ + Optional: true, + Computed: true, + Description: "Sorting of the databases to be returned on each page.", + MarkdownDescription: "Sorting of the databases to be returned on each page.", + Validators: []validator.String{ + stringvalidator.OneOf( + "created_at.desc", + "created_at.asc", + "database_id.desc", + "database_id.asc", + "database_name.desc", + "database_name.asc", + "database_owner.desc", + "database_owner.asc", + "index.asc", + "index.desc", + ), + }, + }, + }, + } +} + +type DatabasesModel struct { + Databases types.List `tfsdk:"databases"` + InstanceId types.String `tfsdk:"instance_id"` + Page types.Int64 `tfsdk:"page"` + Pagination PaginationValue `tfsdk:"pagination"` + ProjectId types.String `tfsdk:"project_id"` + Region types.String `tfsdk:"region"` + Size types.Int64 `tfsdk:"size"` + Sort types.String `tfsdk:"sort"` +} + +var _ basetypes.ObjectTypable = DatabasesType{} + +type DatabasesType struct { + basetypes.ObjectType +} + +func (t DatabasesType) Equal(o attr.Type) bool { + other, ok := o.(DatabasesType) + + if !ok { + return false + } + + return t.ObjectType.Equal(other.ObjectType) +} + +func (t DatabasesType) String() string { + return "DatabasesType" +} + +func (t DatabasesType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) { + var diags diag.Diagnostics + + attributes := in.Attributes() + + createdAttribute, ok := attributes["created"] + + if !ok { + diags.AddError( + "Attribute Missing", + `created is missing from object`) + + return nil, diags + } + + createdVal, ok := createdAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`created expected to be basetypes.StringValue, was: %T`, createdAttribute)) + } + + idAttribute, ok := attributes["id"] + + if !ok { + diags.AddError( + "Attribute Missing", + `id is missing from object`) + + return nil, diags + } + + idVal, ok := idAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`id expected to be basetypes.Int64Value, was: %T`, idAttribute)) + } + + nameAttribute, ok := attributes["name"] + + if !ok { + diags.AddError( + "Attribute Missing", + `name is missing from object`) + + return nil, diags + } + + nameVal, ok := nameAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`name expected to be basetypes.StringValue, was: %T`, nameAttribute)) + } + + ownerAttribute, ok := attributes["owner"] + + if !ok { + diags.AddError( + "Attribute Missing", + `owner is missing from object`) + + return nil, diags + } + + ownerVal, ok := ownerAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`owner expected to be basetypes.StringValue, was: %T`, ownerAttribute)) + } + + if diags.HasError() { + return nil, diags + } + + return DatabasesValue{ + Created: createdVal, + Id: idVal, + Name: nameVal, + Owner: ownerVal, + state: attr.ValueStateKnown, + }, diags +} + +func NewDatabasesValueNull() DatabasesValue { + return DatabasesValue{ + state: attr.ValueStateNull, + } +} + +func NewDatabasesValueUnknown() DatabasesValue { + return DatabasesValue{ + state: attr.ValueStateUnknown, + } +} + +func NewDatabasesValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (DatabasesValue, diag.Diagnostics) { + var diags diag.Diagnostics + + // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521 + ctx := context.Background() + + for name, attributeType := range attributeTypes { + attribute, ok := attributes[name] + + if !ok { + diags.AddError( + "Missing DatabasesValue Attribute Value", + "While creating a DatabasesValue value, a missing attribute value was detected. "+ + "A DatabasesValue must contain values for all attributes, even if null or unknown. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("DatabasesValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()), + ) + + continue + } + + if !attributeType.Equal(attribute.Type(ctx)) { + diags.AddError( + "Invalid DatabasesValue Attribute Type", + "While creating a DatabasesValue value, an invalid attribute value was detected. "+ + "A DatabasesValue must use a matching attribute type for the value. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("DatabasesValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+ + fmt.Sprintf("DatabasesValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)), + ) + } + } + + for name := range attributes { + _, ok := attributeTypes[name] + + if !ok { + diags.AddError( + "Extra DatabasesValue Attribute Value", + "While creating a DatabasesValue value, an extra attribute value was detected. "+ + "A DatabasesValue must not contain values beyond the expected attribute types. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("Extra DatabasesValue Attribute Name: %s", name), + ) + } + } + + if diags.HasError() { + return NewDatabasesValueUnknown(), diags + } + + createdAttribute, ok := attributes["created"] + + if !ok { + diags.AddError( + "Attribute Missing", + `created is missing from object`) + + return NewDatabasesValueUnknown(), diags + } + + createdVal, ok := createdAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`created expected to be basetypes.StringValue, was: %T`, createdAttribute)) + } + + idAttribute, ok := attributes["id"] + + if !ok { + diags.AddError( + "Attribute Missing", + `id is missing from object`) + + return NewDatabasesValueUnknown(), diags + } + + idVal, ok := idAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`id expected to be basetypes.Int64Value, was: %T`, idAttribute)) + } + + nameAttribute, ok := attributes["name"] + + if !ok { + diags.AddError( + "Attribute Missing", + `name is missing from object`) + + return NewDatabasesValueUnknown(), diags + } + + nameVal, ok := nameAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`name expected to be basetypes.StringValue, was: %T`, nameAttribute)) + } + + ownerAttribute, ok := attributes["owner"] + + if !ok { + diags.AddError( + "Attribute Missing", + `owner is missing from object`) + + return NewDatabasesValueUnknown(), diags + } + + ownerVal, ok := ownerAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`owner expected to be basetypes.StringValue, was: %T`, ownerAttribute)) + } + + if diags.HasError() { + return NewDatabasesValueUnknown(), diags + } + + return DatabasesValue{ + Created: createdVal, + Id: idVal, + Name: nameVal, + Owner: ownerVal, + state: attr.ValueStateKnown, + }, diags +} + +func NewDatabasesValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) DatabasesValue { + object, diags := NewDatabasesValue(attributeTypes, attributes) + + if diags.HasError() { + // This could potentially be added to the diag package. + diagsStrings := make([]string, 0, len(diags)) + + for _, diagnostic := range diags { + diagsStrings = append(diagsStrings, fmt.Sprintf( + "%s | %s | %s", + diagnostic.Severity(), + diagnostic.Summary(), + diagnostic.Detail())) + } + + panic("NewDatabasesValueMust received error(s): " + strings.Join(diagsStrings, "\n")) + } + + return object +} + +func (t DatabasesType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) { + if in.Type() == nil { + return NewDatabasesValueNull(), nil + } + + if !in.Type().Equal(t.TerraformType(ctx)) { + return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type()) + } + + if !in.IsKnown() { + return NewDatabasesValueUnknown(), nil + } + + if in.IsNull() { + return NewDatabasesValueNull(), nil + } + + attributes := map[string]attr.Value{} + + val := map[string]tftypes.Value{} + + err := in.As(&val) + + if err != nil { + return nil, err + } + + for k, v := range val { + a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v) + + if err != nil { + return nil, err + } + + attributes[k] = a + } + + return NewDatabasesValueMust(DatabasesValue{}.AttributeTypes(ctx), attributes), nil +} + +func (t DatabasesType) ValueType(ctx context.Context) attr.Value { + return DatabasesValue{} +} + +var _ basetypes.ObjectValuable = DatabasesValue{} + +type DatabasesValue struct { + Created basetypes.StringValue `tfsdk:"created"` + Id basetypes.Int64Value `tfsdk:"id"` + Name basetypes.StringValue `tfsdk:"name"` + Owner basetypes.StringValue `tfsdk:"owner"` + state attr.ValueState +} + +func (v DatabasesValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) { + attrTypes := make(map[string]tftypes.Type, 4) + + var val tftypes.Value + var err error + + attrTypes["created"] = basetypes.StringType{}.TerraformType(ctx) + attrTypes["id"] = basetypes.Int64Type{}.TerraformType(ctx) + attrTypes["name"] = basetypes.StringType{}.TerraformType(ctx) + attrTypes["owner"] = basetypes.StringType{}.TerraformType(ctx) + + objectType := tftypes.Object{AttributeTypes: attrTypes} + + switch v.state { + case attr.ValueStateKnown: + vals := make(map[string]tftypes.Value, 4) + + val, err = v.Created.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["created"] = val + + val, err = v.Id.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["id"] = val + + val, err = v.Name.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["name"] = val + + val, err = v.Owner.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["owner"] = val + + if err := tftypes.ValidateValue(objectType, vals); err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + return tftypes.NewValue(objectType, vals), nil + case attr.ValueStateNull: + return tftypes.NewValue(objectType, nil), nil + case attr.ValueStateUnknown: + return tftypes.NewValue(objectType, tftypes.UnknownValue), nil + default: + panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state)) + } +} + +func (v DatabasesValue) IsNull() bool { + return v.state == attr.ValueStateNull +} + +func (v DatabasesValue) IsUnknown() bool { + return v.state == attr.ValueStateUnknown +} + +func (v DatabasesValue) String() string { + return "DatabasesValue" +} + +func (v DatabasesValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) { + var diags diag.Diagnostics + + attributeTypes := map[string]attr.Type{ + "created": basetypes.StringType{}, + "id": basetypes.Int64Type{}, + "name": basetypes.StringType{}, + "owner": basetypes.StringType{}, + } + + if v.IsNull() { + return types.ObjectNull(attributeTypes), diags + } + + if v.IsUnknown() { + return types.ObjectUnknown(attributeTypes), diags + } + + objVal, diags := types.ObjectValue( + attributeTypes, + map[string]attr.Value{ + "created": v.Created, + "id": v.Id, + "name": v.Name, + "owner": v.Owner, + }) + + return objVal, diags +} + +func (v DatabasesValue) Equal(o attr.Value) bool { + other, ok := o.(DatabasesValue) + + if !ok { + return false + } + + if v.state != other.state { + return false + } + + if v.state != attr.ValueStateKnown { + return true + } + + if !v.Created.Equal(other.Created) { + return false + } + + if !v.Id.Equal(other.Id) { + return false + } + + if !v.Name.Equal(other.Name) { + return false + } + + if !v.Owner.Equal(other.Owner) { + return false + } + + return true +} + +func (v DatabasesValue) Type(ctx context.Context) attr.Type { + return DatabasesType{ + basetypes.ObjectType{ + AttrTypes: v.AttributeTypes(ctx), + }, + } +} + +func (v DatabasesValue) AttributeTypes(ctx context.Context) map[string]attr.Type { + return map[string]attr.Type{ + "created": basetypes.StringType{}, + "id": basetypes.Int64Type{}, + "name": basetypes.StringType{}, + "owner": basetypes.StringType{}, + } +} + +var _ basetypes.ObjectTypable = PaginationType{} + +type PaginationType struct { + basetypes.ObjectType +} + +func (t PaginationType) Equal(o attr.Type) bool { + other, ok := o.(PaginationType) + + if !ok { + return false + } + + return t.ObjectType.Equal(other.ObjectType) +} + +func (t PaginationType) String() string { + return "PaginationType" +} + +func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) { + var diags diag.Diagnostics + + attributes := in.Attributes() + + pageAttribute, ok := attributes["page"] + + if !ok { + diags.AddError( + "Attribute Missing", + `page is missing from object`) + + return nil, diags + } + + pageVal, ok := pageAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute)) + } + + sizeAttribute, ok := attributes["size"] + + if !ok { + diags.AddError( + "Attribute Missing", + `size is missing from object`) + + return nil, diags + } + + sizeVal, ok := sizeAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute)) + } + + sortAttribute, ok := attributes["sort"] + + if !ok { + diags.AddError( + "Attribute Missing", + `sort is missing from object`) + + return nil, diags + } + + sortVal, ok := sortAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`sort expected to be basetypes.StringValue, was: %T`, sortAttribute)) + } + + totalPagesAttribute, ok := attributes["total_pages"] + + if !ok { + diags.AddError( + "Attribute Missing", + `total_pages is missing from object`) + + return nil, diags + } + + totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute)) + } + + totalRowsAttribute, ok := attributes["total_rows"] + + if !ok { + diags.AddError( + "Attribute Missing", + `total_rows is missing from object`) + + return nil, diags + } + + totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute)) + } + + if diags.HasError() { + return nil, diags + } + + return PaginationValue{ + Page: pageVal, + Size: sizeVal, + Sort: sortVal, + TotalPages: totalPagesVal, + TotalRows: totalRowsVal, + state: attr.ValueStateKnown, + }, diags +} + +func NewPaginationValueNull() PaginationValue { + return PaginationValue{ + state: attr.ValueStateNull, + } +} + +func NewPaginationValueUnknown() PaginationValue { + return PaginationValue{ + state: attr.ValueStateUnknown, + } +} + +func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (PaginationValue, diag.Diagnostics) { + var diags diag.Diagnostics + + // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521 + ctx := context.Background() + + for name, attributeType := range attributeTypes { + attribute, ok := attributes[name] + + if !ok { + diags.AddError( + "Missing PaginationValue Attribute Value", + "While creating a PaginationValue value, a missing attribute value was detected. "+ + "A PaginationValue must contain values for all attributes, even if null or unknown. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("PaginationValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()), + ) + + continue + } + + if !attributeType.Equal(attribute.Type(ctx)) { + diags.AddError( + "Invalid PaginationValue Attribute Type", + "While creating a PaginationValue value, an invalid attribute value was detected. "+ + "A PaginationValue must use a matching attribute type for the value. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("PaginationValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+ + fmt.Sprintf("PaginationValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)), + ) + } + } + + for name := range attributes { + _, ok := attributeTypes[name] + + if !ok { + diags.AddError( + "Extra PaginationValue Attribute Value", + "While creating a PaginationValue value, an extra attribute value was detected. "+ + "A PaginationValue must not contain values beyond the expected attribute types. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("Extra PaginationValue Attribute Name: %s", name), + ) + } + } + + if diags.HasError() { + return NewPaginationValueUnknown(), diags + } + + pageAttribute, ok := attributes["page"] + + if !ok { + diags.AddError( + "Attribute Missing", + `page is missing from object`) + + return NewPaginationValueUnknown(), diags + } + + pageVal, ok := pageAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute)) + } + + sizeAttribute, ok := attributes["size"] + + if !ok { + diags.AddError( + "Attribute Missing", + `size is missing from object`) + + return NewPaginationValueUnknown(), diags + } + + sizeVal, ok := sizeAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute)) + } + + sortAttribute, ok := attributes["sort"] + + if !ok { + diags.AddError( + "Attribute Missing", + `sort is missing from object`) + + return NewPaginationValueUnknown(), diags + } + + sortVal, ok := sortAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`sort expected to be basetypes.StringValue, was: %T`, sortAttribute)) + } + + totalPagesAttribute, ok := attributes["total_pages"] + + if !ok { + diags.AddError( + "Attribute Missing", + `total_pages is missing from object`) + + return NewPaginationValueUnknown(), diags + } + + totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute)) + } + + totalRowsAttribute, ok := attributes["total_rows"] + + if !ok { + diags.AddError( + "Attribute Missing", + `total_rows is missing from object`) + + return NewPaginationValueUnknown(), diags + } + + totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute)) + } + + if diags.HasError() { + return NewPaginationValueUnknown(), diags + } + + return PaginationValue{ + Page: pageVal, + Size: sizeVal, + Sort: sortVal, + TotalPages: totalPagesVal, + TotalRows: totalRowsVal, + state: attr.ValueStateKnown, + }, diags +} + +func NewPaginationValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) PaginationValue { + object, diags := NewPaginationValue(attributeTypes, attributes) + + if diags.HasError() { + // This could potentially be added to the diag package. + diagsStrings := make([]string, 0, len(diags)) + + for _, diagnostic := range diags { + diagsStrings = append(diagsStrings, fmt.Sprintf( + "%s | %s | %s", + diagnostic.Severity(), + diagnostic.Summary(), + diagnostic.Detail())) + } + + panic("NewPaginationValueMust received error(s): " + strings.Join(diagsStrings, "\n")) + } + + return object +} + +func (t PaginationType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) { + if in.Type() == nil { + return NewPaginationValueNull(), nil + } + + if !in.Type().Equal(t.TerraformType(ctx)) { + return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type()) + } + + if !in.IsKnown() { + return NewPaginationValueUnknown(), nil + } + + if in.IsNull() { + return NewPaginationValueNull(), nil + } + + attributes := map[string]attr.Value{} + + val := map[string]tftypes.Value{} + + err := in.As(&val) + + if err != nil { + return nil, err + } + + for k, v := range val { + a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v) + + if err != nil { + return nil, err + } + + attributes[k] = a + } + + return NewPaginationValueMust(PaginationValue{}.AttributeTypes(ctx), attributes), nil +} + +func (t PaginationType) ValueType(ctx context.Context) attr.Value { + return PaginationValue{} +} + +var _ basetypes.ObjectValuable = PaginationValue{} + +type PaginationValue struct { + Page basetypes.Int64Value `tfsdk:"page"` + Size basetypes.Int64Value `tfsdk:"size"` + Sort basetypes.StringValue `tfsdk:"sort"` + TotalPages basetypes.Int64Value `tfsdk:"total_pages"` + TotalRows basetypes.Int64Value `tfsdk:"total_rows"` + state attr.ValueState +} + +func (v PaginationValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) { + attrTypes := make(map[string]tftypes.Type, 5) + + var val tftypes.Value + var err error + + attrTypes["page"] = basetypes.Int64Type{}.TerraformType(ctx) + attrTypes["size"] = basetypes.Int64Type{}.TerraformType(ctx) + attrTypes["sort"] = basetypes.StringType{}.TerraformType(ctx) + attrTypes["total_pages"] = basetypes.Int64Type{}.TerraformType(ctx) + attrTypes["total_rows"] = basetypes.Int64Type{}.TerraformType(ctx) + + objectType := tftypes.Object{AttributeTypes: attrTypes} + + switch v.state { + case attr.ValueStateKnown: + vals := make(map[string]tftypes.Value, 5) + + val, err = v.Page.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["page"] = val + + val, err = v.Size.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["size"] = val + + val, err = v.Sort.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["sort"] = val + + val, err = v.TotalPages.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["total_pages"] = val + + val, err = v.TotalRows.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["total_rows"] = val + + if err := tftypes.ValidateValue(objectType, vals); err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + return tftypes.NewValue(objectType, vals), nil + case attr.ValueStateNull: + return tftypes.NewValue(objectType, nil), nil + case attr.ValueStateUnknown: + return tftypes.NewValue(objectType, tftypes.UnknownValue), nil + default: + panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state)) + } +} + +func (v PaginationValue) IsNull() bool { + return v.state == attr.ValueStateNull +} + +func (v PaginationValue) IsUnknown() bool { + return v.state == attr.ValueStateUnknown +} + +func (v PaginationValue) String() string { + return "PaginationValue" +} + +func (v PaginationValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) { + var diags diag.Diagnostics + + attributeTypes := map[string]attr.Type{ + "page": basetypes.Int64Type{}, + "size": basetypes.Int64Type{}, + "sort": basetypes.StringType{}, + "total_pages": basetypes.Int64Type{}, + "total_rows": basetypes.Int64Type{}, + } + + if v.IsNull() { + return types.ObjectNull(attributeTypes), diags + } + + if v.IsUnknown() { + return types.ObjectUnknown(attributeTypes), diags + } + + objVal, diags := types.ObjectValue( + attributeTypes, + map[string]attr.Value{ + "page": v.Page, + "size": v.Size, + "sort": v.Sort, + "total_pages": v.TotalPages, + "total_rows": v.TotalRows, + }) + + return objVal, diags +} + +func (v PaginationValue) Equal(o attr.Value) bool { + other, ok := o.(PaginationValue) + + if !ok { + return false + } + + if v.state != other.state { + return false + } + + if v.state != attr.ValueStateKnown { + return true + } + + if !v.Page.Equal(other.Page) { + return false + } + + if !v.Size.Equal(other.Size) { + return false + } + + if !v.Sort.Equal(other.Sort) { + return false + } + + if !v.TotalPages.Equal(other.TotalPages) { + return false + } + + if !v.TotalRows.Equal(other.TotalRows) { + return false + } + + return true +} + +func (v PaginationValue) Type(ctx context.Context) attr.Type { + return PaginationType{ + basetypes.ObjectType{ + AttrTypes: v.AttributeTypes(ctx), + }, + } +} + +func (v PaginationValue) AttributeTypes(ctx context.Context) map[string]attr.Type { + return map[string]attr.Type{ + "page": basetypes.Int64Type{}, + "size": basetypes.Int64Type{}, + "sort": basetypes.StringType{}, + "total_pages": basetypes.Int64Type{}, + "total_rows": basetypes.Int64Type{}, + } +} diff --git a/stackit/internal/services/sqlserverflexbeta/database/resource.go b/stackit/internal/services/sqlserverflexbeta/database/resource.go new file mode 100644 index 00000000..b28f5ea0 --- /dev/null +++ b/stackit/internal/services/sqlserverflexbeta/database/resource.go @@ -0,0 +1,426 @@ +package sqlserverflexbeta + +import ( + "context" + "fmt" + "strings" + "time" + + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/resource" + "github.com/hashicorp/terraform-plugin-framework/resource/identityschema" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/hashicorp/terraform-plugin-log/tflog" + "github.com/stackitcloud/stackit-sdk-go/core/config" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" + wait "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/wait/sqlserverflexbeta" + + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta" + + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" + + sqlserverflexbetaResGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/database/resources_gen" +) + +var ( + _ resource.Resource = &databaseResource{} + _ resource.ResourceWithConfigure = &databaseResource{} + _ resource.ResourceWithImportState = &databaseResource{} + _ resource.ResourceWithModifyPlan = &databaseResource{} + _ resource.ResourceWithIdentity = &databaseResource{} +) + +func NewDatabaseResource() resource.Resource { + return &databaseResource{} +} + +type databaseResource struct { + client *sqlserverflexbeta.APIClient + providerData core.ProviderData +} + +type DatabaseResourceIdentityModel struct { + ProjectID types.String `tfsdk:"project_id"` + Region types.String `tfsdk:"region"` + InstanceID types.String `tfsdk:"instance_id"` + DatabaseName types.String `tfsdk:"database_name"` +} + +func (r *databaseResource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) { + resp.TypeName = req.ProviderTypeName + "_sqlserverflexbeta_database" +} + +func (r *databaseResource) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) { + resp.Schema = sqlserverflexbetaResGen.DatabaseResourceSchema(ctx) +} + +func (r *databaseResource) IdentitySchema(_ context.Context, _ resource.IdentitySchemaRequest, resp *resource.IdentitySchemaResponse) { + resp.IdentitySchema = identityschema.Schema{ + Attributes: map[string]identityschema.Attribute{ + "project_id": identityschema.StringAttribute{ + RequiredForImport: true, // must be set during import by the practitioner + }, + "region": identityschema.StringAttribute{ + RequiredForImport: true, // can be defaulted by the provider configuration + }, + "instance_id": identityschema.StringAttribute{ + RequiredForImport: true, // can be defaulted by the provider configuration + }, + "database_name": identityschema.StringAttribute{ + RequiredForImport: true, // can be defaulted by the provider configuration + }, + }, + } +} + +// Configure adds the provider configured client to the resource. +func (r *databaseResource) Configure( + ctx context.Context, + req resource.ConfigureRequest, + resp *resource.ConfigureResponse, +) { + var ok bool + r.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics) + if !ok { + return + } + + apiClientConfigOptions := []config.ConfigurationOption{ + config.WithCustomAuth(r.providerData.RoundTripper), + utils.UserAgentConfigOption(r.providerData.Version), + } + if r.providerData.SQLServerFlexCustomEndpoint != "" { + apiClientConfigOptions = append(apiClientConfigOptions, config.WithEndpoint(r.providerData.SQLServerFlexCustomEndpoint)) + } else { + apiClientConfigOptions = append(apiClientConfigOptions, config.WithRegion(r.providerData.GetRegion())) + } + apiClient, err := sqlserverflexbeta.NewAPIClient(apiClientConfigOptions...) + if err != nil { + resp.Diagnostics.AddError( + "Error configuring API client", + fmt.Sprintf( + "Configuring client: %v. This is an error related to the provider configuration, not to the resource configuration", + err, + ), + ) + return + } + r.client = apiClient + tflog.Info(ctx, "sqlserverflexbeta.Database client configured") +} + +func (r *databaseResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) { + var data sqlserverflexbetaResGen.DatabaseModel + createErr := "DB create error" + + // Read Terraform plan data into the model + resp.Diagnostics.Append(req.Plan.Get(ctx, &data)...) + + if resp.Diagnostics.HasError() { + return + } + + // Read identity data + var identityData DatabaseResourceIdentityModel + resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...) + if resp.Diagnostics.HasError() { + return + } + + ctx = core.InitProviderContext(ctx) + + projectId := identityData.ProjectID.ValueString() + region := identityData.Region.ValueString() + instanceId := identityData.InstanceID.ValueString() + ctx = tflog.SetField(ctx, "project_id", projectId) + ctx = tflog.SetField(ctx, "region", region) + ctx = tflog.SetField(ctx, "instance_id", instanceId) + + databaseName := identityData.DatabaseName.ValueString() + ctx = tflog.SetField(ctx, "database_name", databaseName) + + payLoad := sqlserverflexbeta.CreateDatabaseRequestPayload{ + Collation: data.Collation.ValueStringPointer(), + Compatibility: data.Compatibility.ValueInt64Pointer(), + Name: data.Name.ValueStringPointer(), + Owner: data.Owner.ValueStringPointer(), + } + + createResp, err := r.client.CreateDatabaseRequest(ctx, projectId, region, instanceId). + CreateDatabaseRequestPayload(payLoad). + Execute() + if err != nil { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + createErr, + fmt.Sprintf("Calling API: %v", err), + ) + return + } + + ctx = core.LogResponse(ctx) + createId, ok := createResp.GetIdOk() + if !ok { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + createErr, + fmt.Sprintf("Calling API: %v", err), + ) + } + + waitResp, err := wait.CreateDatabaseWaitHandler( + ctx, + r.client, + projectId, + instanceId, + region, + databaseName, + ).SetSleepBeforeWait( + 30 * time.Second, + ).SetTimeout( + 15 * time.Minute, + ).WaitWithContext(ctx) + if err != nil { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + createErr, + fmt.Sprintf("Database creation waiting: %v", err), + ) + return + } + + if waitResp.Id == nil { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + createErr, + "Database creation waiting: returned id is nil", + ) + return + } + + if *waitResp.Id != createId { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + createErr, + "Database creation waiting: returned id is different", + ) + return + } + + if *waitResp.Owner != data.Owner.ValueString() { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + createErr, + "Database creation waiting: returned owner is different", + ) + return + } + + if *waitResp.Name != data.Name.ValueString() { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + createErr, + "Database creation waiting: returned name is different", + ) + return + } + + data.Id = types.Int64PointerValue(waitResp.Id) + data.Name = types.StringPointerValue(waitResp.Name) + + // Save data into Terraform state + resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) + + tflog.Info(ctx, "sqlserverflexbeta.Database created") +} + +func (r *databaseResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) { + var data sqlserverflexbetaResGen.DatabaseModel + + // Read Terraform prior state data into the model + resp.Diagnostics.Append(req.State.Get(ctx, &data)...) + if resp.Diagnostics.HasError() { + return + } + + // Read identity data + var identityData DatabaseResourceIdentityModel + resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...) + if resp.Diagnostics.HasError() { + return + } + + ctx = core.InitProviderContext(ctx) + + projectId := identityData.ProjectID.ValueString() + region := identityData.Region.ValueString() + ctx = tflog.SetField(ctx, "project_id", projectId) + ctx = tflog.SetField(ctx, "region", region) + + // Todo: Read API call logic + + // Save updated data into Terraform state + resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) + + // TODO: Set data returned by API in identity + identity := DatabaseResourceIdentityModel{ + ProjectID: types.StringValue(projectId), + Region: types.StringValue(region), + // InstanceID: types.StringValue(instanceId), + } + resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...) + if resp.Diagnostics.HasError() { + return + } + + tflog.Info(ctx, "sqlserverflexbeta.Database read") +} + +func (r *databaseResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) { + var data sqlserverflexbetaResGen.DatabaseModel + + // Read Terraform prior state data into the model + resp.Diagnostics.Append(req.State.Get(ctx, &data)...) + if resp.Diagnostics.HasError() { + return + } + + // Read identity data + var identityData DatabaseResourceIdentityModel + resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...) + if resp.Diagnostics.HasError() { + return + } + + ctx = core.InitProviderContext(ctx) + + projectId := identityData.ProjectID.ValueString() + region := identityData.Region.ValueString() + ctx = tflog.SetField(ctx, "project_id", projectId) + ctx = tflog.SetField(ctx, "region", region) + + // Todo: Update API call logic + + // Save updated data into Terraform state + resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) + + tflog.Info(ctx, "sqlserverflexbeta.Database updated") +} + +func (r *databaseResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) { + var data sqlserverflexbetaResGen.DatabaseModel + + // Read Terraform prior state data into the model + resp.Diagnostics.Append(req.State.Get(ctx, &data)...) + if resp.Diagnostics.HasError() { + return + } + + // Read identity data + var identityData DatabaseResourceIdentityModel + resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...) + if resp.Diagnostics.HasError() { + return + } + + ctx = core.InitProviderContext(ctx) + + projectId := identityData.ProjectID.ValueString() + region := identityData.Region.ValueString() + ctx = tflog.SetField(ctx, "project_id", projectId) + ctx = tflog.SetField(ctx, "region", region) + + // Todo: Delete API call logic + + tflog.Info(ctx, "sqlserverflexbeta.Database deleted") +} + +// ModifyPlan implements resource.ResourceWithModifyPlan. +// Use the modifier to set the effective region in the current plan. +func (r *databaseResource) ModifyPlan( + ctx context.Context, + req resource.ModifyPlanRequest, + resp *resource.ModifyPlanResponse, +) { // nolint:gocritic // function signature required by Terraform + var configModel sqlserverflexbetaResGen.DatabaseModel + // skip initial empty configuration to avoid follow-up errors + if req.Config.Raw.IsNull() { + return + } + resp.Diagnostics.Append(req.Config.Get(ctx, &configModel)...) + if resp.Diagnostics.HasError() { + return + } + + var planModel sqlserverflexbetaResGen.DatabaseModel + resp.Diagnostics.Append(req.Plan.Get(ctx, &planModel)...) + if resp.Diagnostics.HasError() { + return + } + + utils.AdaptRegion(ctx, configModel.Region, &planModel.Region, r.providerData.GetRegion(), resp) + if resp.Diagnostics.HasError() { + return + } + + var identityModel DatabaseResourceIdentityModel + identityModel.ProjectID = planModel.ProjectId + identityModel.Region = planModel.Region + // TODO: complete + //if !planModel.InstanceId.IsNull() && !planModel.InstanceId.IsUnknown() { + // identityModel.InstanceID = planModel.InstanceId + //} + + resp.Diagnostics.Append(resp.Identity.Set(ctx, identityModel)...) + if resp.Diagnostics.HasError() { + return + } + + resp.Diagnostics.Append(resp.Plan.Set(ctx, planModel)...) + if resp.Diagnostics.HasError() { + return + } +} + +// ImportState imports a resource into the Terraform state on success. +// The expected format of the resource import identifier is: project_id,zone_id,record_set_id +func (r *databaseResource) ImportState( + ctx context.Context, + req resource.ImportStateRequest, + resp *resource.ImportStateResponse, +) { + idParts := strings.Split(req.ID, core.Separator) + + // Todo: Import logic + if len(idParts) < 2 || idParts[0] == "" || idParts[1] == "" { + core.LogAndAddError( + ctx, &resp.Diagnostics, + "Error importing database", + fmt.Sprintf( + "Expected import identifier with format [project_id],[region],..., got %q", + req.ID, + ), + ) + return + } + + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), idParts[0])...) + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), idParts[1])...) + // ... more ... + + core.LogAndAddWarning( + ctx, + &resp.Diagnostics, + "Sqlserverflexbeta database imported with empty password", + "The database password is not imported as it is only available upon creation of a new database. The password field will be empty.", + ) + tflog.Info(ctx, "Sqlserverflexbeta database state imported") +} diff --git a/stackit/internal/services/sqlserverflexbeta/database/resources_gen/database_resource_gen.go b/stackit/internal/services/sqlserverflexbeta/database/resources_gen/database_resource_gen.go new file mode 100644 index 00000000..dccae0c4 --- /dev/null +++ b/stackit/internal/services/sqlserverflexbeta/database/resources_gen/database_resource_gen.go @@ -0,0 +1,99 @@ +// Code generated by terraform-plugin-framework-generator DO NOT EDIT. + +package sqlserverflexbeta + +import ( + "context" + "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator" + "github.com/hashicorp/terraform-plugin-framework/schema/validator" + "github.com/hashicorp/terraform-plugin-framework/types" + + "github.com/hashicorp/terraform-plugin-framework/resource/schema" +) + +func DatabaseResourceSchema(ctx context.Context) schema.Schema { + return schema.Schema{ + Attributes: map[string]schema.Attribute{ + "collation": schema.StringAttribute{ + Optional: true, + Computed: true, + Description: "The collation of the database. This database collation should match the *collation_name* of one of the collations given by the **Get database collation list** endpoint.", + MarkdownDescription: "The collation of the database. This database collation should match the *collation_name* of one of the collations given by the **Get database collation list** endpoint.", + }, + "collation_name": schema.StringAttribute{ + Computed: true, + Description: "The collation of the database. This database collation should match the *collation_name* of one of the collations given by the **Get database collation list** endpoint.", + MarkdownDescription: "The collation of the database. This database collation should match the *collation_name* of one of the collations given by the **Get database collation list** endpoint.", + }, + "compatibility": schema.Int64Attribute{ + Optional: true, + Computed: true, + Description: "CompatibilityLevel of the Database.", + MarkdownDescription: "CompatibilityLevel of the Database.", + }, + "compatibility_level": schema.Int64Attribute{ + Computed: true, + Description: "CompatibilityLevel of the Database.", + MarkdownDescription: "CompatibilityLevel of the Database.", + }, + "database_name": schema.StringAttribute{ + Optional: true, + Computed: true, + Description: "The name of the database.", + MarkdownDescription: "The name of the database.", + }, + "id": schema.Int64Attribute{ + Computed: true, + Description: "The id of the database.", + MarkdownDescription: "The id of the database.", + }, + "instance_id": schema.StringAttribute{ + Optional: true, + Computed: true, + Description: "The ID of the instance.", + MarkdownDescription: "The ID of the instance.", + }, + "name": schema.StringAttribute{ + Required: true, + Description: "The name of the database.", + MarkdownDescription: "The name of the database.", + }, + "owner": schema.StringAttribute{ + Required: true, + Description: "The owner of the database.", + MarkdownDescription: "The owner of the database.", + }, + "project_id": schema.StringAttribute{ + Optional: true, + Computed: true, + Description: "The STACKIT project ID.", + MarkdownDescription: "The STACKIT project ID.", + }, + "region": schema.StringAttribute{ + Optional: true, + Computed: true, + Description: "The region which should be addressed", + MarkdownDescription: "The region which should be addressed", + Validators: []validator.String{ + stringvalidator.OneOf( + "eu01", + ), + }, + }, + }, + } +} + +type DatabaseModel struct { + Collation types.String `tfsdk:"collation"` + CollationName types.String `tfsdk:"collation_name"` + Compatibility types.Int64 `tfsdk:"compatibility"` + CompatibilityLevel types.Int64 `tfsdk:"compatibility_level"` + DatabaseName types.String `tfsdk:"database_name"` + Id types.Int64 `tfsdk:"id"` + InstanceId types.String `tfsdk:"instance_id"` + Name types.String `tfsdk:"name"` + Owner types.String `tfsdk:"owner"` + ProjectId types.String `tfsdk:"project_id"` + Region types.String `tfsdk:"region"` +} diff --git a/stackit/internal/services/sqlserverflexbeta/flavor/datasource.go b/stackit/internal/services/sqlserverflexbeta/flavor/datasource.go new file mode 100644 index 00000000..55f12a9a --- /dev/null +++ b/stackit/internal/services/sqlserverflexbeta/flavor/datasource.go @@ -0,0 +1,335 @@ +package sqlserverFlexBetaFlavor + +import ( + "context" + "fmt" + + "github.com/hashicorp/terraform-plugin-framework/attr" + "github.com/hashicorp/terraform-plugin-framework/datasource" + "github.com/hashicorp/terraform-plugin-framework/datasource/schema" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/hashicorp/terraform-plugin-framework/types/basetypes" + "github.com/hashicorp/terraform-plugin-log/tflog" + "github.com/stackitcloud/stackit-sdk-go/core/config" + + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" + + sqlserverflexbetaPkg "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta" + sqlserverflexbetaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/flavor/datasources_gen" +) + +// Ensure the implementation satisfies the expected interfaces. +var ( + _ datasource.DataSource = &flavorDataSource{} + _ datasource.DataSourceWithConfigure = &flavorDataSource{} +) + +type FlavorModel struct { + ProjectId types.String `tfsdk:"project_id"` + Region types.String `tfsdk:"region"` + StorageClass types.String `tfsdk:"storage_class"` + Cpu types.Int64 `tfsdk:"cpu"` + Description types.String `tfsdk:"description"` + Id types.String `tfsdk:"id"` + FlavorId types.String `tfsdk:"flavor_id"` + MaxGb types.Int64 `tfsdk:"max_gb"` + Memory types.Int64 `tfsdk:"ram"` + MinGb types.Int64 `tfsdk:"min_gb"` + NodeType types.String `tfsdk:"node_type"` + StorageClasses types.List `tfsdk:"storage_classes"` +} + +// NewFlavorDataSource is a helper function to simplify the provider implementation. +func NewFlavorDataSource() datasource.DataSource { + return &flavorDataSource{} +} + +// flavorDataSource is the data source implementation. +type flavorDataSource struct { + client *sqlserverflexbetaPkg.APIClient + providerData core.ProviderData +} + +// Metadata returns the data source type name. +func (r *flavorDataSource) Metadata(_ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) { + resp.TypeName = req.ProviderTypeName + "_sqlserverflexbeta_flavor" +} + +// Configure adds the provider configured client to the data source. +func (r *flavorDataSource) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) { + var ok bool + r.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics) + if !ok { + return + } + + apiClientConfigOptions := []config.ConfigurationOption{ + config.WithCustomAuth(r.providerData.RoundTripper), + utils.UserAgentConfigOption(r.providerData.Version), + } + if r.providerData.SQLServerFlexCustomEndpoint != "" { + apiClientConfigOptions = append( + apiClientConfigOptions, + config.WithEndpoint(r.providerData.SQLServerFlexCustomEndpoint), + ) + } else { + apiClientConfigOptions = append( + apiClientConfigOptions, + config.WithRegion(r.providerData.GetRegion()), + ) + } + apiClient, err := sqlserverflexbetaPkg.NewAPIClient(apiClientConfigOptions...) + if err != nil { + resp.Diagnostics.AddError( + "Error configuring API client", + fmt.Sprintf( + "Configuring client: %v. This is an error related to the provider configuration, not to the resource configuration", + err, + ), + ) + return + } + r.client = apiClient + tflog.Info(ctx, "Postgres Flex instance client configured") +} + +func (r *flavorDataSource) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) { + resp.Schema = schema.Schema{ + Attributes: map[string]schema.Attribute{ + "cpu": schema.Int64Attribute{ + Computed: true, + Description: "The cpu count of the instance.", + MarkdownDescription: "The cpu count of the instance.", + }, + "description": schema.StringAttribute{ + Computed: true, + Description: "The flavor description.", + MarkdownDescription: "The flavor description.", + }, + "id": schema.StringAttribute{ + Computed: true, + Description: "The id of the instance flavor.", + MarkdownDescription: "The id of the instance flavor.", + }, + "max_gb": schema.Int64Attribute{ + Computed: true, + Description: "maximum storage which can be ordered for the flavor in Gigabyte.", + MarkdownDescription: "maximum storage which can be ordered for the flavor in Gigabyte.", + }, + "memory": schema.Int64Attribute{ + Computed: true, + Description: "The memory of the instance in Gibibyte.", + MarkdownDescription: "The memory of the instance in Gibibyte.", + }, + "min_gb": schema.Int64Attribute{ + Computed: true, + Description: "minimum storage which is required to order in Gigabyte.", + MarkdownDescription: "minimum storage which is required to order in Gigabyte.", + }, + "node_type": schema.StringAttribute{ + Computed: true, + Description: "defines the nodeType it can be either single or HA", + MarkdownDescription: "defines the nodeType it can be either single or HA", + }, + "storage_classes": schema.ListNestedAttribute{ + NestedObject: schema.NestedAttributeObject{ + Attributes: map[string]schema.Attribute{ + "class": schema.StringAttribute{ + Computed: true, + }, + "max_io_per_sec": schema.Int64Attribute{ + Computed: true, + }, + "max_through_in_mb": schema.Int64Attribute{ + Computed: true, + }, + }, + CustomType: sqlserverflexbetaGen.StorageClassesType{ + ObjectType: types.ObjectType{ + AttrTypes: sqlserverflexbetaGen.StorageClassesValue{}.AttributeTypes(ctx), + }, + }, + }, + Computed: true, + Description: "maximum storage which can be ordered for the flavor in Gigabyte.", + MarkdownDescription: "maximum storage which can be ordered for the flavor in Gigabyte.", + }, + }, + //Attributes: map[string]schema.Attribute{ + // "project_id": schema.StringAttribute{ + // Required: true, + // Description: "The cpu count of the instance.", + // MarkdownDescription: "The cpu count of the instance.", + // }, + // "region": schema.StringAttribute{ + // Required: true, + // Description: "The flavor description.", + // MarkdownDescription: "The flavor description.", + // }, + // "cpu": schema.Int64Attribute{ + // Required: true, + // Description: "The cpu count of the instance.", + // MarkdownDescription: "The cpu count of the instance.", + // }, + // "ram": schema.Int64Attribute{ + // Required: true, + // Description: "The memory of the instance in Gibibyte.", + // MarkdownDescription: "The memory of the instance in Gibibyte.", + // }, + // "storage_class": schema.StringAttribute{ + // Required: true, + // Description: "The memory of the instance in Gibibyte.", + // MarkdownDescription: "The memory of the instance in Gibibyte.", + // }, + // "description": schema.StringAttribute{ + // Computed: true, + // Description: "The flavor description.", + // MarkdownDescription: "The flavor description.", + // }, + // "id": schema.StringAttribute{ + // Computed: true, + // Description: "The terraform id of the instance flavor.", + // MarkdownDescription: "The terraform id of the instance flavor.", + // }, + // "flavor_id": schema.StringAttribute{ + // Computed: true, + // Description: "The flavor id of the instance flavor.", + // MarkdownDescription: "The flavor id of the instance flavor.", + // }, + // "max_gb": schema.Int64Attribute{ + // Computed: true, + // Description: "maximum storage which can be ordered for the flavor in Gigabyte.", + // MarkdownDescription: "maximum storage which can be ordered for the flavor in Gigabyte.", + // }, + // "min_gb": schema.Int64Attribute{ + // Computed: true, + // Description: "minimum storage which is required to order in Gigabyte.", + // MarkdownDescription: "minimum storage which is required to order in Gigabyte.", + // }, + // "node_type": schema.StringAttribute{ + // Required: true, + // Description: "defines the nodeType it can be either single or replica", + // MarkdownDescription: "defines the nodeType it can be either single or replica", + // }, + // "storage_classes": schema.ListNestedAttribute{ + // Computed: true, + // NestedObject: schema.NestedAttributeObject{ + // Attributes: map[string]schema.Attribute{ + // "class": schema.StringAttribute{ + // Computed: true, + // }, + // "max_io_per_sec": schema.Int64Attribute{ + // Computed: true, + // }, + // "max_through_in_mb": schema.Int64Attribute{ + // Computed: true, + // }, + // }, + // CustomType: sqlserverflexalphaGen.StorageClassesType{ + // ObjectType: types.ObjectType{ + // AttrTypes: sqlserverflexalphaGen.StorageClassesValue{}.AttributeTypes(ctx), + // }, + // }, + // }, + // }, + //}, + } +} + +func (r *flavorDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { + var model FlavorModel + diags := req.Config.Get(ctx, &model) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + + ctx = core.InitProviderContext(ctx) + + projectId := model.ProjectId.ValueString() + region := r.providerData.GetRegionWithOverride(model.Region) + ctx = tflog.SetField(ctx, "project_id", projectId) + ctx = tflog.SetField(ctx, "region", region) + + flavors, err := getAllFlavors(ctx, r.client, projectId, region) + if err != nil { + core.LogAndAddError(ctx, &resp.Diagnostics, "Error reading flavors", fmt.Sprintf("getAllFlavors: %v", err)) + return + } + + var foundFlavors []sqlserverflexbetaPkg.ListFlavors + for _, flavor := range flavors { + if model.Cpu.ValueInt64() != *flavor.Cpu { + continue + } + if model.Memory.ValueInt64() != *flavor.Memory { + continue + } + if model.NodeType.ValueString() != *flavor.NodeType { + continue + } + for _, sc := range *flavor.StorageClasses { + if model.StorageClass.ValueString() != *sc.Class { + continue + } + foundFlavors = append(foundFlavors, flavor) + } + } + if len(foundFlavors) == 0 { + resp.Diagnostics.AddError("get flavor", "could not find requested flavor") + return + } + if len(foundFlavors) > 1 { + resp.Diagnostics.AddError("get flavor", "found too many matching flavors") + return + } + + f := foundFlavors[0] + model.Description = types.StringValue(*f.Description) + model.Id = utils.BuildInternalTerraformId(model.ProjectId.ValueString(), region, *f.Id) + model.FlavorId = types.StringValue(*f.Id) + model.MaxGb = types.Int64Value(*f.MaxGB) + model.MinGb = types.Int64Value(*f.MinGB) + + if f.StorageClasses == nil { + model.StorageClasses = types.ListNull(sqlserverflexbetaGen.StorageClassesType{ + ObjectType: basetypes.ObjectType{ + AttrTypes: sqlserverflexbetaGen.StorageClassesValue{}.AttributeTypes(ctx), + }, + }) + } else { + var scList []attr.Value + for _, sc := range *f.StorageClasses { + scList = append( + scList, + sqlserverflexbetaGen.NewStorageClassesValueMust( + sqlserverflexbetaGen.StorageClassesValue{}.AttributeTypes(ctx), + map[string]attr.Value{ + "class": types.StringValue(*sc.Class), + "max_io_per_sec": types.Int64Value(*sc.MaxIoPerSec), + "max_through_in_mb": types.Int64Value(*sc.MaxThroughInMb), + }, + ), + ) + } + storageClassesList := types.ListValueMust( + sqlserverflexbetaGen.StorageClassesType{ + ObjectType: basetypes.ObjectType{ + AttrTypes: sqlserverflexbetaGen.StorageClassesValue{}.AttributeTypes(ctx), + }, + }, + scList, + ) + model.StorageClasses = storageClassesList + } + + // Set refreshed state + diags = resp.State.Set(ctx, model) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + tflog.Info(ctx, "Postgres Flex flavors read") +} diff --git a/stackit/internal/services/sqlserverflexbeta/flavor/datasources_gen/flavor_data_source_gen.go b/stackit/internal/services/sqlserverflexbeta/flavor/datasources_gen/flavor_data_source_gen.go new file mode 100644 index 00000000..a766197e --- /dev/null +++ b/stackit/internal/services/sqlserverflexbeta/flavor/datasources_gen/flavor_data_source_gen.go @@ -0,0 +1,1909 @@ +// Code generated by terraform-plugin-framework-generator DO NOT EDIT. + +package sqlserverflexbeta + +import ( + "context" + "fmt" + "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator" + "github.com/hashicorp/terraform-plugin-framework/attr" + "github.com/hashicorp/terraform-plugin-framework/diag" + "github.com/hashicorp/terraform-plugin-framework/schema/validator" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/hashicorp/terraform-plugin-framework/types/basetypes" + "github.com/hashicorp/terraform-plugin-go/tftypes" + "strings" + + "github.com/hashicorp/terraform-plugin-framework/datasource/schema" +) + +func FlavorDataSourceSchema(ctx context.Context) schema.Schema { + return schema.Schema{ + Attributes: map[string]schema.Attribute{ + "flavors": schema.ListNestedAttribute{ + NestedObject: schema.NestedAttributeObject{ + Attributes: map[string]schema.Attribute{ + "cpu": schema.Int64Attribute{ + Computed: true, + Description: "The cpu count of the instance.", + MarkdownDescription: "The cpu count of the instance.", + }, + "description": schema.StringAttribute{ + Computed: true, + Description: "The flavor description.", + MarkdownDescription: "The flavor description.", + }, + "id": schema.StringAttribute{ + Computed: true, + Description: "The id of the instance flavor.", + MarkdownDescription: "The id of the instance flavor.", + }, + "max_gb": schema.Int64Attribute{ + Computed: true, + Description: "maximum storage which can be ordered for the flavor in Gigabyte.", + MarkdownDescription: "maximum storage which can be ordered for the flavor in Gigabyte.", + }, + "memory": schema.Int64Attribute{ + Computed: true, + Description: "The memory of the instance in Gibibyte.", + MarkdownDescription: "The memory of the instance in Gibibyte.", + }, + "min_gb": schema.Int64Attribute{ + Computed: true, + Description: "minimum storage which is required to order in Gigabyte.", + MarkdownDescription: "minimum storage which is required to order in Gigabyte.", + }, + "node_type": schema.StringAttribute{ + Computed: true, + Description: "defines the nodeType it can be either single or HA", + MarkdownDescription: "defines the nodeType it can be either single or HA", + }, + "storage_classes": schema.ListNestedAttribute{ + NestedObject: schema.NestedAttributeObject{ + Attributes: map[string]schema.Attribute{ + "class": schema.StringAttribute{ + Computed: true, + }, + "max_io_per_sec": schema.Int64Attribute{ + Computed: true, + }, + "max_through_in_mb": schema.Int64Attribute{ + Computed: true, + }, + }, + CustomType: StorageClassesType{ + ObjectType: types.ObjectType{ + AttrTypes: StorageClassesValue{}.AttributeTypes(ctx), + }, + }, + }, + Computed: true, + Description: "maximum storage which can be ordered for the flavor in Gigabyte.", + MarkdownDescription: "maximum storage which can be ordered for the flavor in Gigabyte.", + }, + }, + CustomType: FlavorsType{ + ObjectType: types.ObjectType{ + AttrTypes: FlavorsValue{}.AttributeTypes(ctx), + }, + }, + }, + Computed: true, + Description: "List of flavors available for the project.", + MarkdownDescription: "List of flavors available for the project.", + }, + "page": schema.Int64Attribute{ + Optional: true, + Computed: true, + Description: "Number of the page of items list to be returned.", + MarkdownDescription: "Number of the page of items list to be returned.", + }, + "pagination": schema.SingleNestedAttribute{ + Attributes: map[string]schema.Attribute{ + "page": schema.Int64Attribute{ + Computed: true, + }, + "size": schema.Int64Attribute{ + Computed: true, + }, + "sort": schema.StringAttribute{ + Computed: true, + }, + "total_pages": schema.Int64Attribute{ + Computed: true, + }, + "total_rows": schema.Int64Attribute{ + Computed: true, + }, + }, + CustomType: PaginationType{ + ObjectType: types.ObjectType{ + AttrTypes: PaginationValue{}.AttributeTypes(ctx), + }, + }, + Computed: true, + }, + "project_id": schema.StringAttribute{ + Required: true, + Description: "The STACKIT project ID.", + MarkdownDescription: "The STACKIT project ID.", + }, + "region": schema.StringAttribute{ + Required: true, + Description: "The region which should be addressed", + MarkdownDescription: "The region which should be addressed", + Validators: []validator.String{ + stringvalidator.OneOf( + "eu01", + ), + }, + }, + "size": schema.Int64Attribute{ + Optional: true, + Computed: true, + Description: "Number of items to be returned on each page.", + MarkdownDescription: "Number of items to be returned on each page.", + }, + "sort": schema.StringAttribute{ + Optional: true, + Computed: true, + Description: "Sorting of the flavors to be returned on each page.", + MarkdownDescription: "Sorting of the flavors to be returned on each page.", + Validators: []validator.String{ + stringvalidator.OneOf( + "index.desc", + "index.asc", + "cpu.desc", + "cpu.asc", + "flavor_description.asc", + "flavor_description.desc", + "id.desc", + "id.asc", + "size_max.desc", + "size_max.asc", + "ram.desc", + "ram.asc", + "size_min.desc", + "size_min.asc", + "storage_class.asc", + "storage_class.desc", + "node_type.asc", + "node_type.desc", + ), + }, + }, + }, + } +} + +type FlavorModel struct { + Flavors types.List `tfsdk:"flavors"` + Page types.Int64 `tfsdk:"page"` + Pagination PaginationValue `tfsdk:"pagination"` + ProjectId types.String `tfsdk:"project_id"` + Region types.String `tfsdk:"region"` + Size types.Int64 `tfsdk:"size"` + Sort types.String `tfsdk:"sort"` +} + +var _ basetypes.ObjectTypable = FlavorsType{} + +type FlavorsType struct { + basetypes.ObjectType +} + +func (t FlavorsType) Equal(o attr.Type) bool { + other, ok := o.(FlavorsType) + + if !ok { + return false + } + + return t.ObjectType.Equal(other.ObjectType) +} + +func (t FlavorsType) String() string { + return "FlavorsType" +} + +func (t FlavorsType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) { + var diags diag.Diagnostics + + attributes := in.Attributes() + + cpuAttribute, ok := attributes["cpu"] + + if !ok { + diags.AddError( + "Attribute Missing", + `cpu is missing from object`) + + return nil, diags + } + + cpuVal, ok := cpuAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`cpu expected to be basetypes.Int64Value, was: %T`, cpuAttribute)) + } + + descriptionAttribute, ok := attributes["description"] + + if !ok { + diags.AddError( + "Attribute Missing", + `description is missing from object`) + + return nil, diags + } + + descriptionVal, ok := descriptionAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`description expected to be basetypes.StringValue, was: %T`, descriptionAttribute)) + } + + idAttribute, ok := attributes["id"] + + if !ok { + diags.AddError( + "Attribute Missing", + `id is missing from object`) + + return nil, diags + } + + idVal, ok := idAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`id expected to be basetypes.StringValue, was: %T`, idAttribute)) + } + + maxGbAttribute, ok := attributes["max_gb"] + + if !ok { + diags.AddError( + "Attribute Missing", + `max_gb is missing from object`) + + return nil, diags + } + + maxGbVal, ok := maxGbAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`max_gb expected to be basetypes.Int64Value, was: %T`, maxGbAttribute)) + } + + memoryAttribute, ok := attributes["memory"] + + if !ok { + diags.AddError( + "Attribute Missing", + `memory is missing from object`) + + return nil, diags + } + + memoryVal, ok := memoryAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`memory expected to be basetypes.Int64Value, was: %T`, memoryAttribute)) + } + + minGbAttribute, ok := attributes["min_gb"] + + if !ok { + diags.AddError( + "Attribute Missing", + `min_gb is missing from object`) + + return nil, diags + } + + minGbVal, ok := minGbAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`min_gb expected to be basetypes.Int64Value, was: %T`, minGbAttribute)) + } + + nodeTypeAttribute, ok := attributes["node_type"] + + if !ok { + diags.AddError( + "Attribute Missing", + `node_type is missing from object`) + + return nil, diags + } + + nodeTypeVal, ok := nodeTypeAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`node_type expected to be basetypes.StringValue, was: %T`, nodeTypeAttribute)) + } + + storageClassesAttribute, ok := attributes["storage_classes"] + + if !ok { + diags.AddError( + "Attribute Missing", + `storage_classes is missing from object`) + + return nil, diags + } + + storageClassesVal, ok := storageClassesAttribute.(basetypes.ListValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`storage_classes expected to be basetypes.ListValue, was: %T`, storageClassesAttribute)) + } + + if diags.HasError() { + return nil, diags + } + + return FlavorsValue{ + Cpu: cpuVal, + Description: descriptionVal, + Id: idVal, + MaxGb: maxGbVal, + Memory: memoryVal, + MinGb: minGbVal, + NodeType: nodeTypeVal, + StorageClasses: storageClassesVal, + state: attr.ValueStateKnown, + }, diags +} + +func NewFlavorsValueNull() FlavorsValue { + return FlavorsValue{ + state: attr.ValueStateNull, + } +} + +func NewFlavorsValueUnknown() FlavorsValue { + return FlavorsValue{ + state: attr.ValueStateUnknown, + } +} + +func NewFlavorsValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (FlavorsValue, diag.Diagnostics) { + var diags diag.Diagnostics + + // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521 + ctx := context.Background() + + for name, attributeType := range attributeTypes { + attribute, ok := attributes[name] + + if !ok { + diags.AddError( + "Missing FlavorsValue Attribute Value", + "While creating a FlavorsValue value, a missing attribute value was detected. "+ + "A FlavorsValue must contain values for all attributes, even if null or unknown. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("FlavorsValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()), + ) + + continue + } + + if !attributeType.Equal(attribute.Type(ctx)) { + diags.AddError( + "Invalid FlavorsValue Attribute Type", + "While creating a FlavorsValue value, an invalid attribute value was detected. "+ + "A FlavorsValue must use a matching attribute type for the value. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("FlavorsValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+ + fmt.Sprintf("FlavorsValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)), + ) + } + } + + for name := range attributes { + _, ok := attributeTypes[name] + + if !ok { + diags.AddError( + "Extra FlavorsValue Attribute Value", + "While creating a FlavorsValue value, an extra attribute value was detected. "+ + "A FlavorsValue must not contain values beyond the expected attribute types. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("Extra FlavorsValue Attribute Name: %s", name), + ) + } + } + + if diags.HasError() { + return NewFlavorsValueUnknown(), diags + } + + cpuAttribute, ok := attributes["cpu"] + + if !ok { + diags.AddError( + "Attribute Missing", + `cpu is missing from object`) + + return NewFlavorsValueUnknown(), diags + } + + cpuVal, ok := cpuAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`cpu expected to be basetypes.Int64Value, was: %T`, cpuAttribute)) + } + + descriptionAttribute, ok := attributes["description"] + + if !ok { + diags.AddError( + "Attribute Missing", + `description is missing from object`) + + return NewFlavorsValueUnknown(), diags + } + + descriptionVal, ok := descriptionAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`description expected to be basetypes.StringValue, was: %T`, descriptionAttribute)) + } + + idAttribute, ok := attributes["id"] + + if !ok { + diags.AddError( + "Attribute Missing", + `id is missing from object`) + + return NewFlavorsValueUnknown(), diags + } + + idVal, ok := idAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`id expected to be basetypes.StringValue, was: %T`, idAttribute)) + } + + maxGbAttribute, ok := attributes["max_gb"] + + if !ok { + diags.AddError( + "Attribute Missing", + `max_gb is missing from object`) + + return NewFlavorsValueUnknown(), diags + } + + maxGbVal, ok := maxGbAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`max_gb expected to be basetypes.Int64Value, was: %T`, maxGbAttribute)) + } + + memoryAttribute, ok := attributes["memory"] + + if !ok { + diags.AddError( + "Attribute Missing", + `memory is missing from object`) + + return NewFlavorsValueUnknown(), diags + } + + memoryVal, ok := memoryAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`memory expected to be basetypes.Int64Value, was: %T`, memoryAttribute)) + } + + minGbAttribute, ok := attributes["min_gb"] + + if !ok { + diags.AddError( + "Attribute Missing", + `min_gb is missing from object`) + + return NewFlavorsValueUnknown(), diags + } + + minGbVal, ok := minGbAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`min_gb expected to be basetypes.Int64Value, was: %T`, minGbAttribute)) + } + + nodeTypeAttribute, ok := attributes["node_type"] + + if !ok { + diags.AddError( + "Attribute Missing", + `node_type is missing from object`) + + return NewFlavorsValueUnknown(), diags + } + + nodeTypeVal, ok := nodeTypeAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`node_type expected to be basetypes.StringValue, was: %T`, nodeTypeAttribute)) + } + + storageClassesAttribute, ok := attributes["storage_classes"] + + if !ok { + diags.AddError( + "Attribute Missing", + `storage_classes is missing from object`) + + return NewFlavorsValueUnknown(), diags + } + + storageClassesVal, ok := storageClassesAttribute.(basetypes.ListValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`storage_classes expected to be basetypes.ListValue, was: %T`, storageClassesAttribute)) + } + + if diags.HasError() { + return NewFlavorsValueUnknown(), diags + } + + return FlavorsValue{ + Cpu: cpuVal, + Description: descriptionVal, + Id: idVal, + MaxGb: maxGbVal, + Memory: memoryVal, + MinGb: minGbVal, + NodeType: nodeTypeVal, + StorageClasses: storageClassesVal, + state: attr.ValueStateKnown, + }, diags +} + +func NewFlavorsValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) FlavorsValue { + object, diags := NewFlavorsValue(attributeTypes, attributes) + + if diags.HasError() { + // This could potentially be added to the diag package. + diagsStrings := make([]string, 0, len(diags)) + + for _, diagnostic := range diags { + diagsStrings = append(diagsStrings, fmt.Sprintf( + "%s | %s | %s", + diagnostic.Severity(), + diagnostic.Summary(), + diagnostic.Detail())) + } + + panic("NewFlavorsValueMust received error(s): " + strings.Join(diagsStrings, "\n")) + } + + return object +} + +func (t FlavorsType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) { + if in.Type() == nil { + return NewFlavorsValueNull(), nil + } + + if !in.Type().Equal(t.TerraformType(ctx)) { + return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type()) + } + + if !in.IsKnown() { + return NewFlavorsValueUnknown(), nil + } + + if in.IsNull() { + return NewFlavorsValueNull(), nil + } + + attributes := map[string]attr.Value{} + + val := map[string]tftypes.Value{} + + err := in.As(&val) + + if err != nil { + return nil, err + } + + for k, v := range val { + a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v) + + if err != nil { + return nil, err + } + + attributes[k] = a + } + + return NewFlavorsValueMust(FlavorsValue{}.AttributeTypes(ctx), attributes), nil +} + +func (t FlavorsType) ValueType(ctx context.Context) attr.Value { + return FlavorsValue{} +} + +var _ basetypes.ObjectValuable = FlavorsValue{} + +type FlavorsValue struct { + Cpu basetypes.Int64Value `tfsdk:"cpu"` + Description basetypes.StringValue `tfsdk:"description"` + Id basetypes.StringValue `tfsdk:"id"` + MaxGb basetypes.Int64Value `tfsdk:"max_gb"` + Memory basetypes.Int64Value `tfsdk:"memory"` + MinGb basetypes.Int64Value `tfsdk:"min_gb"` + NodeType basetypes.StringValue `tfsdk:"node_type"` + StorageClasses basetypes.ListValue `tfsdk:"storage_classes"` + state attr.ValueState +} + +func (v FlavorsValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) { + attrTypes := make(map[string]tftypes.Type, 8) + + var val tftypes.Value + var err error + + attrTypes["cpu"] = basetypes.Int64Type{}.TerraformType(ctx) + attrTypes["description"] = basetypes.StringType{}.TerraformType(ctx) + attrTypes["id"] = basetypes.StringType{}.TerraformType(ctx) + attrTypes["max_gb"] = basetypes.Int64Type{}.TerraformType(ctx) + attrTypes["memory"] = basetypes.Int64Type{}.TerraformType(ctx) + attrTypes["min_gb"] = basetypes.Int64Type{}.TerraformType(ctx) + attrTypes["node_type"] = basetypes.StringType{}.TerraformType(ctx) + attrTypes["storage_classes"] = basetypes.ListType{ + ElemType: StorageClassesValue{}.Type(ctx), + }.TerraformType(ctx) + + objectType := tftypes.Object{AttributeTypes: attrTypes} + + switch v.state { + case attr.ValueStateKnown: + vals := make(map[string]tftypes.Value, 8) + + val, err = v.Cpu.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["cpu"] = val + + val, err = v.Description.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["description"] = val + + val, err = v.Id.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["id"] = val + + val, err = v.MaxGb.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["max_gb"] = val + + val, err = v.Memory.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["memory"] = val + + val, err = v.MinGb.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["min_gb"] = val + + val, err = v.NodeType.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["node_type"] = val + + val, err = v.StorageClasses.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["storage_classes"] = val + + if err := tftypes.ValidateValue(objectType, vals); err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + return tftypes.NewValue(objectType, vals), nil + case attr.ValueStateNull: + return tftypes.NewValue(objectType, nil), nil + case attr.ValueStateUnknown: + return tftypes.NewValue(objectType, tftypes.UnknownValue), nil + default: + panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state)) + } +} + +func (v FlavorsValue) IsNull() bool { + return v.state == attr.ValueStateNull +} + +func (v FlavorsValue) IsUnknown() bool { + return v.state == attr.ValueStateUnknown +} + +func (v FlavorsValue) String() string { + return "FlavorsValue" +} + +func (v FlavorsValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) { + var diags diag.Diagnostics + + storageClasses := types.ListValueMust( + StorageClassesType{ + basetypes.ObjectType{ + AttrTypes: StorageClassesValue{}.AttributeTypes(ctx), + }, + }, + v.StorageClasses.Elements(), + ) + + if v.StorageClasses.IsNull() { + storageClasses = types.ListNull( + StorageClassesType{ + basetypes.ObjectType{ + AttrTypes: StorageClassesValue{}.AttributeTypes(ctx), + }, + }, + ) + } + + if v.StorageClasses.IsUnknown() { + storageClasses = types.ListUnknown( + StorageClassesType{ + basetypes.ObjectType{ + AttrTypes: StorageClassesValue{}.AttributeTypes(ctx), + }, + }, + ) + } + + attributeTypes := map[string]attr.Type{ + "cpu": basetypes.Int64Type{}, + "description": basetypes.StringType{}, + "id": basetypes.StringType{}, + "max_gb": basetypes.Int64Type{}, + "memory": basetypes.Int64Type{}, + "min_gb": basetypes.Int64Type{}, + "node_type": basetypes.StringType{}, + "storage_classes": basetypes.ListType{ + ElemType: StorageClassesValue{}.Type(ctx), + }, + } + + if v.IsNull() { + return types.ObjectNull(attributeTypes), diags + } + + if v.IsUnknown() { + return types.ObjectUnknown(attributeTypes), diags + } + + objVal, diags := types.ObjectValue( + attributeTypes, + map[string]attr.Value{ + "cpu": v.Cpu, + "description": v.Description, + "id": v.Id, + "max_gb": v.MaxGb, + "memory": v.Memory, + "min_gb": v.MinGb, + "node_type": v.NodeType, + "storage_classes": storageClasses, + }) + + return objVal, diags +} + +func (v FlavorsValue) Equal(o attr.Value) bool { + other, ok := o.(FlavorsValue) + + if !ok { + return false + } + + if v.state != other.state { + return false + } + + if v.state != attr.ValueStateKnown { + return true + } + + if !v.Cpu.Equal(other.Cpu) { + return false + } + + if !v.Description.Equal(other.Description) { + return false + } + + if !v.Id.Equal(other.Id) { + return false + } + + if !v.MaxGb.Equal(other.MaxGb) { + return false + } + + if !v.Memory.Equal(other.Memory) { + return false + } + + if !v.MinGb.Equal(other.MinGb) { + return false + } + + if !v.NodeType.Equal(other.NodeType) { + return false + } + + if !v.StorageClasses.Equal(other.StorageClasses) { + return false + } + + return true +} + +func (v FlavorsValue) Type(ctx context.Context) attr.Type { + return FlavorsType{ + basetypes.ObjectType{ + AttrTypes: v.AttributeTypes(ctx), + }, + } +} + +func (v FlavorsValue) AttributeTypes(ctx context.Context) map[string]attr.Type { + return map[string]attr.Type{ + "cpu": basetypes.Int64Type{}, + "description": basetypes.StringType{}, + "id": basetypes.StringType{}, + "max_gb": basetypes.Int64Type{}, + "memory": basetypes.Int64Type{}, + "min_gb": basetypes.Int64Type{}, + "node_type": basetypes.StringType{}, + "storage_classes": basetypes.ListType{ + ElemType: StorageClassesValue{}.Type(ctx), + }, + } +} + +var _ basetypes.ObjectTypable = StorageClassesType{} + +type StorageClassesType struct { + basetypes.ObjectType +} + +func (t StorageClassesType) Equal(o attr.Type) bool { + other, ok := o.(StorageClassesType) + + if !ok { + return false + } + + return t.ObjectType.Equal(other.ObjectType) +} + +func (t StorageClassesType) String() string { + return "StorageClassesType" +} + +func (t StorageClassesType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) { + var diags diag.Diagnostics + + attributes := in.Attributes() + + classAttribute, ok := attributes["class"] + + if !ok { + diags.AddError( + "Attribute Missing", + `class is missing from object`) + + return nil, diags + } + + classVal, ok := classAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`class expected to be basetypes.StringValue, was: %T`, classAttribute)) + } + + maxIoPerSecAttribute, ok := attributes["max_io_per_sec"] + + if !ok { + diags.AddError( + "Attribute Missing", + `max_io_per_sec is missing from object`) + + return nil, diags + } + + maxIoPerSecVal, ok := maxIoPerSecAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`max_io_per_sec expected to be basetypes.Int64Value, was: %T`, maxIoPerSecAttribute)) + } + + maxThroughInMbAttribute, ok := attributes["max_through_in_mb"] + + if !ok { + diags.AddError( + "Attribute Missing", + `max_through_in_mb is missing from object`) + + return nil, diags + } + + maxThroughInMbVal, ok := maxThroughInMbAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`max_through_in_mb expected to be basetypes.Int64Value, was: %T`, maxThroughInMbAttribute)) + } + + if diags.HasError() { + return nil, diags + } + + return StorageClassesValue{ + Class: classVal, + MaxIoPerSec: maxIoPerSecVal, + MaxThroughInMb: maxThroughInMbVal, + state: attr.ValueStateKnown, + }, diags +} + +func NewStorageClassesValueNull() StorageClassesValue { + return StorageClassesValue{ + state: attr.ValueStateNull, + } +} + +func NewStorageClassesValueUnknown() StorageClassesValue { + return StorageClassesValue{ + state: attr.ValueStateUnknown, + } +} + +func NewStorageClassesValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (StorageClassesValue, diag.Diagnostics) { + var diags diag.Diagnostics + + // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521 + ctx := context.Background() + + for name, attributeType := range attributeTypes { + attribute, ok := attributes[name] + + if !ok { + diags.AddError( + "Missing StorageClassesValue Attribute Value", + "While creating a StorageClassesValue value, a missing attribute value was detected. "+ + "A StorageClassesValue must contain values for all attributes, even if null or unknown. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("StorageClassesValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()), + ) + + continue + } + + if !attributeType.Equal(attribute.Type(ctx)) { + diags.AddError( + "Invalid StorageClassesValue Attribute Type", + "While creating a StorageClassesValue value, an invalid attribute value was detected. "+ + "A StorageClassesValue must use a matching attribute type for the value. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("StorageClassesValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+ + fmt.Sprintf("StorageClassesValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)), + ) + } + } + + for name := range attributes { + _, ok := attributeTypes[name] + + if !ok { + diags.AddError( + "Extra StorageClassesValue Attribute Value", + "While creating a StorageClassesValue value, an extra attribute value was detected. "+ + "A StorageClassesValue must not contain values beyond the expected attribute types. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("Extra StorageClassesValue Attribute Name: %s", name), + ) + } + } + + if diags.HasError() { + return NewStorageClassesValueUnknown(), diags + } + + classAttribute, ok := attributes["class"] + + if !ok { + diags.AddError( + "Attribute Missing", + `class is missing from object`) + + return NewStorageClassesValueUnknown(), diags + } + + classVal, ok := classAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`class expected to be basetypes.StringValue, was: %T`, classAttribute)) + } + + maxIoPerSecAttribute, ok := attributes["max_io_per_sec"] + + if !ok { + diags.AddError( + "Attribute Missing", + `max_io_per_sec is missing from object`) + + return NewStorageClassesValueUnknown(), diags + } + + maxIoPerSecVal, ok := maxIoPerSecAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`max_io_per_sec expected to be basetypes.Int64Value, was: %T`, maxIoPerSecAttribute)) + } + + maxThroughInMbAttribute, ok := attributes["max_through_in_mb"] + + if !ok { + diags.AddError( + "Attribute Missing", + `max_through_in_mb is missing from object`) + + return NewStorageClassesValueUnknown(), diags + } + + maxThroughInMbVal, ok := maxThroughInMbAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`max_through_in_mb expected to be basetypes.Int64Value, was: %T`, maxThroughInMbAttribute)) + } + + if diags.HasError() { + return NewStorageClassesValueUnknown(), diags + } + + return StorageClassesValue{ + Class: classVal, + MaxIoPerSec: maxIoPerSecVal, + MaxThroughInMb: maxThroughInMbVal, + state: attr.ValueStateKnown, + }, diags +} + +func NewStorageClassesValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) StorageClassesValue { + object, diags := NewStorageClassesValue(attributeTypes, attributes) + + if diags.HasError() { + // This could potentially be added to the diag package. + diagsStrings := make([]string, 0, len(diags)) + + for _, diagnostic := range diags { + diagsStrings = append(diagsStrings, fmt.Sprintf( + "%s | %s | %s", + diagnostic.Severity(), + diagnostic.Summary(), + diagnostic.Detail())) + } + + panic("NewStorageClassesValueMust received error(s): " + strings.Join(diagsStrings, "\n")) + } + + return object +} + +func (t StorageClassesType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) { + if in.Type() == nil { + return NewStorageClassesValueNull(), nil + } + + if !in.Type().Equal(t.TerraformType(ctx)) { + return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type()) + } + + if !in.IsKnown() { + return NewStorageClassesValueUnknown(), nil + } + + if in.IsNull() { + return NewStorageClassesValueNull(), nil + } + + attributes := map[string]attr.Value{} + + val := map[string]tftypes.Value{} + + err := in.As(&val) + + if err != nil { + return nil, err + } + + for k, v := range val { + a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v) + + if err != nil { + return nil, err + } + + attributes[k] = a + } + + return NewStorageClassesValueMust(StorageClassesValue{}.AttributeTypes(ctx), attributes), nil +} + +func (t StorageClassesType) ValueType(ctx context.Context) attr.Value { + return StorageClassesValue{} +} + +var _ basetypes.ObjectValuable = StorageClassesValue{} + +type StorageClassesValue struct { + Class basetypes.StringValue `tfsdk:"class"` + MaxIoPerSec basetypes.Int64Value `tfsdk:"max_io_per_sec"` + MaxThroughInMb basetypes.Int64Value `tfsdk:"max_through_in_mb"` + state attr.ValueState +} + +func (v StorageClassesValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) { + attrTypes := make(map[string]tftypes.Type, 3) + + var val tftypes.Value + var err error + + attrTypes["class"] = basetypes.StringType{}.TerraformType(ctx) + attrTypes["max_io_per_sec"] = basetypes.Int64Type{}.TerraformType(ctx) + attrTypes["max_through_in_mb"] = basetypes.Int64Type{}.TerraformType(ctx) + + objectType := tftypes.Object{AttributeTypes: attrTypes} + + switch v.state { + case attr.ValueStateKnown: + vals := make(map[string]tftypes.Value, 3) + + val, err = v.Class.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["class"] = val + + val, err = v.MaxIoPerSec.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["max_io_per_sec"] = val + + val, err = v.MaxThroughInMb.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["max_through_in_mb"] = val + + if err := tftypes.ValidateValue(objectType, vals); err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + return tftypes.NewValue(objectType, vals), nil + case attr.ValueStateNull: + return tftypes.NewValue(objectType, nil), nil + case attr.ValueStateUnknown: + return tftypes.NewValue(objectType, tftypes.UnknownValue), nil + default: + panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state)) + } +} + +func (v StorageClassesValue) IsNull() bool { + return v.state == attr.ValueStateNull +} + +func (v StorageClassesValue) IsUnknown() bool { + return v.state == attr.ValueStateUnknown +} + +func (v StorageClassesValue) String() string { + return "StorageClassesValue" +} + +func (v StorageClassesValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) { + var diags diag.Diagnostics + + attributeTypes := map[string]attr.Type{ + "class": basetypes.StringType{}, + "max_io_per_sec": basetypes.Int64Type{}, + "max_through_in_mb": basetypes.Int64Type{}, + } + + if v.IsNull() { + return types.ObjectNull(attributeTypes), diags + } + + if v.IsUnknown() { + return types.ObjectUnknown(attributeTypes), diags + } + + objVal, diags := types.ObjectValue( + attributeTypes, + map[string]attr.Value{ + "class": v.Class, + "max_io_per_sec": v.MaxIoPerSec, + "max_through_in_mb": v.MaxThroughInMb, + }) + + return objVal, diags +} + +func (v StorageClassesValue) Equal(o attr.Value) bool { + other, ok := o.(StorageClassesValue) + + if !ok { + return false + } + + if v.state != other.state { + return false + } + + if v.state != attr.ValueStateKnown { + return true + } + + if !v.Class.Equal(other.Class) { + return false + } + + if !v.MaxIoPerSec.Equal(other.MaxIoPerSec) { + return false + } + + if !v.MaxThroughInMb.Equal(other.MaxThroughInMb) { + return false + } + + return true +} + +func (v StorageClassesValue) Type(ctx context.Context) attr.Type { + return StorageClassesType{ + basetypes.ObjectType{ + AttrTypes: v.AttributeTypes(ctx), + }, + } +} + +func (v StorageClassesValue) AttributeTypes(ctx context.Context) map[string]attr.Type { + return map[string]attr.Type{ + "class": basetypes.StringType{}, + "max_io_per_sec": basetypes.Int64Type{}, + "max_through_in_mb": basetypes.Int64Type{}, + } +} + +var _ basetypes.ObjectTypable = PaginationType{} + +type PaginationType struct { + basetypes.ObjectType +} + +func (t PaginationType) Equal(o attr.Type) bool { + other, ok := o.(PaginationType) + + if !ok { + return false + } + + return t.ObjectType.Equal(other.ObjectType) +} + +func (t PaginationType) String() string { + return "PaginationType" +} + +func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) { + var diags diag.Diagnostics + + attributes := in.Attributes() + + pageAttribute, ok := attributes["page"] + + if !ok { + diags.AddError( + "Attribute Missing", + `page is missing from object`) + + return nil, diags + } + + pageVal, ok := pageAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute)) + } + + sizeAttribute, ok := attributes["size"] + + if !ok { + diags.AddError( + "Attribute Missing", + `size is missing from object`) + + return nil, diags + } + + sizeVal, ok := sizeAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute)) + } + + sortAttribute, ok := attributes["sort"] + + if !ok { + diags.AddError( + "Attribute Missing", + `sort is missing from object`) + + return nil, diags + } + + sortVal, ok := sortAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`sort expected to be basetypes.StringValue, was: %T`, sortAttribute)) + } + + totalPagesAttribute, ok := attributes["total_pages"] + + if !ok { + diags.AddError( + "Attribute Missing", + `total_pages is missing from object`) + + return nil, diags + } + + totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute)) + } + + totalRowsAttribute, ok := attributes["total_rows"] + + if !ok { + diags.AddError( + "Attribute Missing", + `total_rows is missing from object`) + + return nil, diags + } + + totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute)) + } + + if diags.HasError() { + return nil, diags + } + + return PaginationValue{ + Page: pageVal, + Size: sizeVal, + Sort: sortVal, + TotalPages: totalPagesVal, + TotalRows: totalRowsVal, + state: attr.ValueStateKnown, + }, diags +} + +func NewPaginationValueNull() PaginationValue { + return PaginationValue{ + state: attr.ValueStateNull, + } +} + +func NewPaginationValueUnknown() PaginationValue { + return PaginationValue{ + state: attr.ValueStateUnknown, + } +} + +func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (PaginationValue, diag.Diagnostics) { + var diags diag.Diagnostics + + // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521 + ctx := context.Background() + + for name, attributeType := range attributeTypes { + attribute, ok := attributes[name] + + if !ok { + diags.AddError( + "Missing PaginationValue Attribute Value", + "While creating a PaginationValue value, a missing attribute value was detected. "+ + "A PaginationValue must contain values for all attributes, even if null or unknown. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("PaginationValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()), + ) + + continue + } + + if !attributeType.Equal(attribute.Type(ctx)) { + diags.AddError( + "Invalid PaginationValue Attribute Type", + "While creating a PaginationValue value, an invalid attribute value was detected. "+ + "A PaginationValue must use a matching attribute type for the value. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("PaginationValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+ + fmt.Sprintf("PaginationValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)), + ) + } + } + + for name := range attributes { + _, ok := attributeTypes[name] + + if !ok { + diags.AddError( + "Extra PaginationValue Attribute Value", + "While creating a PaginationValue value, an extra attribute value was detected. "+ + "A PaginationValue must not contain values beyond the expected attribute types. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("Extra PaginationValue Attribute Name: %s", name), + ) + } + } + + if diags.HasError() { + return NewPaginationValueUnknown(), diags + } + + pageAttribute, ok := attributes["page"] + + if !ok { + diags.AddError( + "Attribute Missing", + `page is missing from object`) + + return NewPaginationValueUnknown(), diags + } + + pageVal, ok := pageAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute)) + } + + sizeAttribute, ok := attributes["size"] + + if !ok { + diags.AddError( + "Attribute Missing", + `size is missing from object`) + + return NewPaginationValueUnknown(), diags + } + + sizeVal, ok := sizeAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute)) + } + + sortAttribute, ok := attributes["sort"] + + if !ok { + diags.AddError( + "Attribute Missing", + `sort is missing from object`) + + return NewPaginationValueUnknown(), diags + } + + sortVal, ok := sortAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`sort expected to be basetypes.StringValue, was: %T`, sortAttribute)) + } + + totalPagesAttribute, ok := attributes["total_pages"] + + if !ok { + diags.AddError( + "Attribute Missing", + `total_pages is missing from object`) + + return NewPaginationValueUnknown(), diags + } + + totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute)) + } + + totalRowsAttribute, ok := attributes["total_rows"] + + if !ok { + diags.AddError( + "Attribute Missing", + `total_rows is missing from object`) + + return NewPaginationValueUnknown(), diags + } + + totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute)) + } + + if diags.HasError() { + return NewPaginationValueUnknown(), diags + } + + return PaginationValue{ + Page: pageVal, + Size: sizeVal, + Sort: sortVal, + TotalPages: totalPagesVal, + TotalRows: totalRowsVal, + state: attr.ValueStateKnown, + }, diags +} + +func NewPaginationValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) PaginationValue { + object, diags := NewPaginationValue(attributeTypes, attributes) + + if diags.HasError() { + // This could potentially be added to the diag package. + diagsStrings := make([]string, 0, len(diags)) + + for _, diagnostic := range diags { + diagsStrings = append(diagsStrings, fmt.Sprintf( + "%s | %s | %s", + diagnostic.Severity(), + diagnostic.Summary(), + diagnostic.Detail())) + } + + panic("NewPaginationValueMust received error(s): " + strings.Join(diagsStrings, "\n")) + } + + return object +} + +func (t PaginationType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) { + if in.Type() == nil { + return NewPaginationValueNull(), nil + } + + if !in.Type().Equal(t.TerraformType(ctx)) { + return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type()) + } + + if !in.IsKnown() { + return NewPaginationValueUnknown(), nil + } + + if in.IsNull() { + return NewPaginationValueNull(), nil + } + + attributes := map[string]attr.Value{} + + val := map[string]tftypes.Value{} + + err := in.As(&val) + + if err != nil { + return nil, err + } + + for k, v := range val { + a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v) + + if err != nil { + return nil, err + } + + attributes[k] = a + } + + return NewPaginationValueMust(PaginationValue{}.AttributeTypes(ctx), attributes), nil +} + +func (t PaginationType) ValueType(ctx context.Context) attr.Value { + return PaginationValue{} +} + +var _ basetypes.ObjectValuable = PaginationValue{} + +type PaginationValue struct { + Page basetypes.Int64Value `tfsdk:"page"` + Size basetypes.Int64Value `tfsdk:"size"` + Sort basetypes.StringValue `tfsdk:"sort"` + TotalPages basetypes.Int64Value `tfsdk:"total_pages"` + TotalRows basetypes.Int64Value `tfsdk:"total_rows"` + state attr.ValueState +} + +func (v PaginationValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) { + attrTypes := make(map[string]tftypes.Type, 5) + + var val tftypes.Value + var err error + + attrTypes["page"] = basetypes.Int64Type{}.TerraformType(ctx) + attrTypes["size"] = basetypes.Int64Type{}.TerraformType(ctx) + attrTypes["sort"] = basetypes.StringType{}.TerraformType(ctx) + attrTypes["total_pages"] = basetypes.Int64Type{}.TerraformType(ctx) + attrTypes["total_rows"] = basetypes.Int64Type{}.TerraformType(ctx) + + objectType := tftypes.Object{AttributeTypes: attrTypes} + + switch v.state { + case attr.ValueStateKnown: + vals := make(map[string]tftypes.Value, 5) + + val, err = v.Page.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["page"] = val + + val, err = v.Size.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["size"] = val + + val, err = v.Sort.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["sort"] = val + + val, err = v.TotalPages.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["total_pages"] = val + + val, err = v.TotalRows.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["total_rows"] = val + + if err := tftypes.ValidateValue(objectType, vals); err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + return tftypes.NewValue(objectType, vals), nil + case attr.ValueStateNull: + return tftypes.NewValue(objectType, nil), nil + case attr.ValueStateUnknown: + return tftypes.NewValue(objectType, tftypes.UnknownValue), nil + default: + panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state)) + } +} + +func (v PaginationValue) IsNull() bool { + return v.state == attr.ValueStateNull +} + +func (v PaginationValue) IsUnknown() bool { + return v.state == attr.ValueStateUnknown +} + +func (v PaginationValue) String() string { + return "PaginationValue" +} + +func (v PaginationValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) { + var diags diag.Diagnostics + + attributeTypes := map[string]attr.Type{ + "page": basetypes.Int64Type{}, + "size": basetypes.Int64Type{}, + "sort": basetypes.StringType{}, + "total_pages": basetypes.Int64Type{}, + "total_rows": basetypes.Int64Type{}, + } + + if v.IsNull() { + return types.ObjectNull(attributeTypes), diags + } + + if v.IsUnknown() { + return types.ObjectUnknown(attributeTypes), diags + } + + objVal, diags := types.ObjectValue( + attributeTypes, + map[string]attr.Value{ + "page": v.Page, + "size": v.Size, + "sort": v.Sort, + "total_pages": v.TotalPages, + "total_rows": v.TotalRows, + }) + + return objVal, diags +} + +func (v PaginationValue) Equal(o attr.Value) bool { + other, ok := o.(PaginationValue) + + if !ok { + return false + } + + if v.state != other.state { + return false + } + + if v.state != attr.ValueStateKnown { + return true + } + + if !v.Page.Equal(other.Page) { + return false + } + + if !v.Size.Equal(other.Size) { + return false + } + + if !v.Sort.Equal(other.Sort) { + return false + } + + if !v.TotalPages.Equal(other.TotalPages) { + return false + } + + if !v.TotalRows.Equal(other.TotalRows) { + return false + } + + return true +} + +func (v PaginationValue) Type(ctx context.Context) attr.Type { + return PaginationType{ + basetypes.ObjectType{ + AttrTypes: v.AttributeTypes(ctx), + }, + } +} + +func (v PaginationValue) AttributeTypes(ctx context.Context) map[string]attr.Type { + return map[string]attr.Type{ + "page": basetypes.Int64Type{}, + "size": basetypes.Int64Type{}, + "sort": basetypes.StringType{}, + "total_pages": basetypes.Int64Type{}, + "total_rows": basetypes.Int64Type{}, + } +} diff --git a/stackit/internal/services/sqlserverflexbeta/flavor/functions.go b/stackit/internal/services/sqlserverflexbeta/flavor/functions.go new file mode 100644 index 00000000..8c06da73 --- /dev/null +++ b/stackit/internal/services/sqlserverflexbeta/flavor/functions.go @@ -0,0 +1,65 @@ +package sqlserverFlexBetaFlavor + +import ( + "context" + "fmt" + + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta" +) + +type flavorsClientReader interface { + GetFlavorsRequest( + ctx context.Context, + projectId, region string, + ) sqlserverflexbeta.ApiGetFlavorsRequestRequest +} + +func getAllFlavors(ctx context.Context, client flavorsClientReader, projectId, region string) ( + []sqlserverflexbeta.ListFlavors, + error, +) { + getAllFilter := func(_ sqlserverflexbeta.ListFlavors) bool { return true } + flavorList, err := getFlavorsByFilter(ctx, client, projectId, region, getAllFilter) + if err != nil { + return nil, err + } + return flavorList, nil +} + +// getFlavorsByFilter is a helper function to retrieve flavors using a filtern function. +// Hint: The API does not have a GetFlavors endpoint, only ListFlavors +func getFlavorsByFilter( + ctx context.Context, + client flavorsClientReader, + projectId, region string, + filter func(db sqlserverflexbeta.ListFlavors) bool, +) ([]sqlserverflexbeta.ListFlavors, error) { + if projectId == "" || region == "" { + return nil, fmt.Errorf("listing sqlserverflexbeta flavors: projectId and region are required") + } + + const pageSize = 25 + + var result = make([]sqlserverflexbeta.ListFlavors, 0) + + for page := int64(1); ; page++ { + res, err := client.GetFlavorsRequest(ctx, projectId, region). + Page(page).Size(pageSize).Sort(sqlserverflexbeta.FLAVORSORT_INDEX_ASC).Execute() + if err != nil { + return nil, fmt.Errorf("requesting flavors list (page %d): %w", page, err) + } + + // If the API returns no flavors, we have reached the end of the list. + if res.Flavors == nil || len(*res.Flavors) == 0 { + break + } + + for _, flavor := range *res.Flavors { + if filter(flavor) { + result = append(result, flavor) + } + } + } + + return result, nil +} diff --git a/stackit/internal/services/sqlserverflexbeta/flavor/functions_test.go b/stackit/internal/services/sqlserverflexbeta/flavor/functions_test.go new file mode 100644 index 00000000..974f1fa4 --- /dev/null +++ b/stackit/internal/services/sqlserverflexbeta/flavor/functions_test.go @@ -0,0 +1,134 @@ +package sqlserverFlexBetaFlavor + +import ( + "context" + "testing" + + "github.com/stackitcloud/stackit-sdk-go/core/utils" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta" +) + +type mockRequest struct { + executeFunc func() (*sqlserverflexbeta.GetFlavorsResponse, error) +} + +func (m *mockRequest) Page(_ int64) sqlserverflexbeta.ApiGetFlavorsRequestRequest { return m } +func (m *mockRequest) Size(_ int64) sqlserverflexbeta.ApiGetFlavorsRequestRequest { return m } +func (m *mockRequest) Sort(_ sqlserverflexbeta.FlavorSort) sqlserverflexbeta.ApiGetFlavorsRequestRequest { + return m +} +func (m *mockRequest) Execute() (*sqlserverflexbeta.GetFlavorsResponse, error) { + return m.executeFunc() +} + +type mockFlavorsClient struct { + executeRequest func() sqlserverflexbeta.ApiGetFlavorsRequestRequest +} + +func (m *mockFlavorsClient) GetFlavorsRequest(_ context.Context, _, _ string) sqlserverflexbeta.ApiGetFlavorsRequestRequest { + return m.executeRequest() +} + +var mockResp = func(page int64) (*sqlserverflexbeta.GetFlavorsResponse, error) { + if page == 1 { + return &sqlserverflexbeta.GetFlavorsResponse{ + Flavors: &[]sqlserverflexbeta.ListFlavors{ + {Id: utils.Ptr("flavor-1"), Description: utils.Ptr("first")}, + {Id: utils.Ptr("flavor-2"), Description: utils.Ptr("second")}, + }, + }, nil + } + if page == 2 { + return &sqlserverflexbeta.GetFlavorsResponse{ + Flavors: &[]sqlserverflexbeta.ListFlavors{ + {Id: utils.Ptr("flavor-3"), Description: utils.Ptr("three")}, + }, + }, nil + } + + return &sqlserverflexbeta.GetFlavorsResponse{ + Flavors: &[]sqlserverflexbeta.ListFlavors{}, + }, nil +} + +func TestGetFlavorsByFilter(t *testing.T) { + tests := []struct { + description string + projectId string + region string + mockErr error + filter func(sqlserverflexbeta.ListFlavors) bool + wantCount int + wantErr bool + }{ + { + description: "Success - Get all flavors (2 pages)", + projectId: "pid", region: "reg", + filter: func(_ sqlserverflexbeta.ListFlavors) bool { return true }, + wantCount: 3, + wantErr: false, + }, + { + description: "Success - Filter flavors by description", + projectId: "pid", region: "reg", + filter: func(f sqlserverflexbeta.ListFlavors) bool { return *f.Description == "first" }, + wantCount: 1, + wantErr: false, + }, + { + description: "Error - Missing parameters", + projectId: "", region: "reg", + wantErr: true, + }, + } + + for _, tt := range tests { + t.Run( + tt.description, func(t *testing.T) { + var currentPage int64 + client := &mockFlavorsClient{ + executeRequest: func() sqlserverflexbeta.ApiGetFlavorsRequestRequest { + return &mockRequest{ + executeFunc: func() (*sqlserverflexbeta.GetFlavorsResponse, error) { + currentPage++ + return mockResp(currentPage) + }, + } + }, + } + actual, err := getFlavorsByFilter(context.Background(), client, tt.projectId, tt.region, tt.filter) + + if (err != nil) != tt.wantErr { + t.Errorf("getFlavorsByFilter() error = %v, wantErr %v", err, tt.wantErr) + return + } + + if !tt.wantErr && len(actual) != tt.wantCount { + t.Errorf("getFlavorsByFilter() got %d flavors, want %d", len(actual), tt.wantCount) + } + }, + ) + } +} + +func TestGetAllFlavors(t *testing.T) { + var currentPage int64 + client := &mockFlavorsClient{ + executeRequest: func() sqlserverflexbeta.ApiGetFlavorsRequestRequest { + return &mockRequest{ + executeFunc: func() (*sqlserverflexbeta.GetFlavorsResponse, error) { + currentPage++ + return mockResp(currentPage) + }, + } + }, + } + + res, err := getAllFlavors(context.Background(), client, "pid", "reg") + if err != nil { + t.Errorf("getAllFlavors() unexpected error: %v", err) + } + if len(res) != 3 { + t.Errorf("getAllFlavors() expected 3 flavor, got %d", len(res)) + } +} diff --git a/stackit/internal/services/sqlserverflexbeta/flavors/datasource.go b/stackit/internal/services/sqlserverflexbeta/flavors/datasource.go new file mode 100644 index 00000000..41b1aad8 --- /dev/null +++ b/stackit/internal/services/sqlserverflexbeta/flavors/datasource.go @@ -0,0 +1,118 @@ +package sqlserverflexbeta + +import ( + "context" + "fmt" + "net/http" + + "github.com/hashicorp/terraform-plugin-framework/datasource" + "github.com/hashicorp/terraform-plugin-log/tflog" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" + + sqlserverflexbetaPkg "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta" + + sqlserverflexbetaUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/utils" + + sqlserverflexbetaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/flavors/datasources_gen" +) + +var _ datasource.DataSource = (*flavorsDataSource)(nil) + +const errorPrefix = "[Sqlserverflexbeta - Flavors]" + +func NewFlavorsDataSource() datasource.DataSource { + return &flavorsDataSource{} +} + +type flavorsDataSource struct { + client *sqlserverflexbetaPkg.APIClient + providerData core.ProviderData +} + +func (d *flavorsDataSource) Metadata(_ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) { + resp.TypeName = req.ProviderTypeName + "_sqlserverflexbeta_flavors" +} + +func (d *flavorsDataSource) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) { + resp.Schema = sqlserverflexbetaGen.FlavorsDataSourceSchema(ctx) +} + +// Configure adds the provider configured client to the data source. +func (d *flavorsDataSource) Configure( + ctx context.Context, + req datasource.ConfigureRequest, + resp *datasource.ConfigureResponse, +) { + var ok bool + d.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics) + if !ok { + return + } + + apiClient := sqlserverflexbetaUtils.ConfigureClient(ctx, &d.providerData, &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } + d.client = apiClient + tflog.Info(ctx, fmt.Sprintf("%s client configured", errorPrefix)) +} + +func (d *flavorsDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { + var data sqlserverflexbetaGen.FlavorsModel + + // Read Terraform configuration data into the model + resp.Diagnostics.Append(req.Config.Get(ctx, &data)...) + + if resp.Diagnostics.HasError() { + return + } + + ctx = core.InitProviderContext(ctx) + + projectId := data.ProjectId.ValueString() + region := d.providerData.GetRegionWithOverride(data.Region) + flavorsId := data.FlavorsId.ValueString() + + ctx = tflog.SetField(ctx, "project_id", projectId) + ctx = tflog.SetField(ctx, "region", region) + ctx = tflog.SetField(ctx, "flavors_id", flavorsId) + + flavorsResp, err := d.client.GetFlavorsRequest(ctx, projectId, region, flavorsId).Execute() + if err != nil { + utils.LogError( + ctx, + &resp.Diagnostics, + err, + "Reading flavors", + fmt.Sprintf("flavors with ID %q does not exist in project %q.", flavorsId, projectId), + map[int]string{ + http.StatusForbidden: fmt.Sprintf("Project with ID %q not found or forbidden access", projectId), + }, + ) + resp.State.RemoveResource(ctx) + return + } + + ctx = core.LogResponse(ctx) + + // Todo: Read API call logic + + // Example data value setting + // data.Id = types.StringValue("example-id") + + err = mapResponseToModel(ctx, flavorsResp, &data, resp.Diagnostics) + if err != nil { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + fmt.Sprintf("%s Read", errorPrefix), + fmt.Sprintf("Processing API payload: %v", err), + ) + return + } + + // Save data into Terraform state + resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) +} diff --git a/stackit/internal/services/sqlserverflexbeta/flavors/datasources_gen/flavors_data_source_gen.go b/stackit/internal/services/sqlserverflexbeta/flavors/datasources_gen/flavors_data_source_gen.go new file mode 100644 index 00000000..94b526be --- /dev/null +++ b/stackit/internal/services/sqlserverflexbeta/flavors/datasources_gen/flavors_data_source_gen.go @@ -0,0 +1,1909 @@ +// Code generated by terraform-plugin-framework-generator DO NOT EDIT. + +package sqlserverflexbeta + +import ( + "context" + "fmt" + "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator" + "github.com/hashicorp/terraform-plugin-framework/attr" + "github.com/hashicorp/terraform-plugin-framework/diag" + "github.com/hashicorp/terraform-plugin-framework/schema/validator" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/hashicorp/terraform-plugin-framework/types/basetypes" + "github.com/hashicorp/terraform-plugin-go/tftypes" + "strings" + + "github.com/hashicorp/terraform-plugin-framework/datasource/schema" +) + +func FlavorsDataSourceSchema(ctx context.Context) schema.Schema { + return schema.Schema{ + Attributes: map[string]schema.Attribute{ + "flavors": schema.ListNestedAttribute{ + NestedObject: schema.NestedAttributeObject{ + Attributes: map[string]schema.Attribute{ + "cpu": schema.Int64Attribute{ + Computed: true, + Description: "The cpu count of the instance.", + MarkdownDescription: "The cpu count of the instance.", + }, + "description": schema.StringAttribute{ + Computed: true, + Description: "The flavor description.", + MarkdownDescription: "The flavor description.", + }, + "id": schema.StringAttribute{ + Computed: true, + Description: "The id of the instance flavor.", + MarkdownDescription: "The id of the instance flavor.", + }, + "max_gb": schema.Int64Attribute{ + Computed: true, + Description: "maximum storage which can be ordered for the flavor in Gigabyte.", + MarkdownDescription: "maximum storage which can be ordered for the flavor in Gigabyte.", + }, + "memory": schema.Int64Attribute{ + Computed: true, + Description: "The memory of the instance in Gibibyte.", + MarkdownDescription: "The memory of the instance in Gibibyte.", + }, + "min_gb": schema.Int64Attribute{ + Computed: true, + Description: "minimum storage which is required to order in Gigabyte.", + MarkdownDescription: "minimum storage which is required to order in Gigabyte.", + }, + "node_type": schema.StringAttribute{ + Computed: true, + Description: "defines the nodeType it can be either single or HA", + MarkdownDescription: "defines the nodeType it can be either single or HA", + }, + "storage_classes": schema.ListNestedAttribute{ + NestedObject: schema.NestedAttributeObject{ + Attributes: map[string]schema.Attribute{ + "class": schema.StringAttribute{ + Computed: true, + }, + "max_io_per_sec": schema.Int64Attribute{ + Computed: true, + }, + "max_through_in_mb": schema.Int64Attribute{ + Computed: true, + }, + }, + CustomType: StorageClassesType{ + ObjectType: types.ObjectType{ + AttrTypes: StorageClassesValue{}.AttributeTypes(ctx), + }, + }, + }, + Computed: true, + Description: "maximum storage which can be ordered for the flavor in Gigabyte.", + MarkdownDescription: "maximum storage which can be ordered for the flavor in Gigabyte.", + }, + }, + CustomType: FlavorsType{ + ObjectType: types.ObjectType{ + AttrTypes: FlavorsValue{}.AttributeTypes(ctx), + }, + }, + }, + Computed: true, + Description: "List of flavors available for the project.", + MarkdownDescription: "List of flavors available for the project.", + }, + "page": schema.Int64Attribute{ + Optional: true, + Computed: true, + Description: "Number of the page of items list to be returned.", + MarkdownDescription: "Number of the page of items list to be returned.", + }, + "pagination": schema.SingleNestedAttribute{ + Attributes: map[string]schema.Attribute{ + "page": schema.Int64Attribute{ + Computed: true, + }, + "size": schema.Int64Attribute{ + Computed: true, + }, + "sort": schema.StringAttribute{ + Computed: true, + }, + "total_pages": schema.Int64Attribute{ + Computed: true, + }, + "total_rows": schema.Int64Attribute{ + Computed: true, + }, + }, + CustomType: PaginationType{ + ObjectType: types.ObjectType{ + AttrTypes: PaginationValue{}.AttributeTypes(ctx), + }, + }, + Computed: true, + }, + "project_id": schema.StringAttribute{ + Required: true, + Description: "The STACKIT project ID.", + MarkdownDescription: "The STACKIT project ID.", + }, + "region": schema.StringAttribute{ + Required: true, + Description: "The region which should be addressed", + MarkdownDescription: "The region which should be addressed", + Validators: []validator.String{ + stringvalidator.OneOf( + "eu01", + ), + }, + }, + "size": schema.Int64Attribute{ + Optional: true, + Computed: true, + Description: "Number of items to be returned on each page.", + MarkdownDescription: "Number of items to be returned on each page.", + }, + "sort": schema.StringAttribute{ + Optional: true, + Computed: true, + Description: "Sorting of the flavors to be returned on each page.", + MarkdownDescription: "Sorting of the flavors to be returned on each page.", + Validators: []validator.String{ + stringvalidator.OneOf( + "index.desc", + "index.asc", + "cpu.desc", + "cpu.asc", + "flavor_description.asc", + "flavor_description.desc", + "id.desc", + "id.asc", + "size_max.desc", + "size_max.asc", + "ram.desc", + "ram.asc", + "size_min.desc", + "size_min.asc", + "storage_class.asc", + "storage_class.desc", + "node_type.asc", + "node_type.desc", + ), + }, + }, + }, + } +} + +type FlavorsModel struct { + Flavors types.List `tfsdk:"flavors"` + Page types.Int64 `tfsdk:"page"` + Pagination PaginationValue `tfsdk:"pagination"` + ProjectId types.String `tfsdk:"project_id"` + Region types.String `tfsdk:"region"` + Size types.Int64 `tfsdk:"size"` + Sort types.String `tfsdk:"sort"` +} + +var _ basetypes.ObjectTypable = FlavorsType{} + +type FlavorsType struct { + basetypes.ObjectType +} + +func (t FlavorsType) Equal(o attr.Type) bool { + other, ok := o.(FlavorsType) + + if !ok { + return false + } + + return t.ObjectType.Equal(other.ObjectType) +} + +func (t FlavorsType) String() string { + return "FlavorsType" +} + +func (t FlavorsType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) { + var diags diag.Diagnostics + + attributes := in.Attributes() + + cpuAttribute, ok := attributes["cpu"] + + if !ok { + diags.AddError( + "Attribute Missing", + `cpu is missing from object`) + + return nil, diags + } + + cpuVal, ok := cpuAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`cpu expected to be basetypes.Int64Value, was: %T`, cpuAttribute)) + } + + descriptionAttribute, ok := attributes["description"] + + if !ok { + diags.AddError( + "Attribute Missing", + `description is missing from object`) + + return nil, diags + } + + descriptionVal, ok := descriptionAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`description expected to be basetypes.StringValue, was: %T`, descriptionAttribute)) + } + + idAttribute, ok := attributes["id"] + + if !ok { + diags.AddError( + "Attribute Missing", + `id is missing from object`) + + return nil, diags + } + + idVal, ok := idAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`id expected to be basetypes.StringValue, was: %T`, idAttribute)) + } + + maxGbAttribute, ok := attributes["max_gb"] + + if !ok { + diags.AddError( + "Attribute Missing", + `max_gb is missing from object`) + + return nil, diags + } + + maxGbVal, ok := maxGbAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`max_gb expected to be basetypes.Int64Value, was: %T`, maxGbAttribute)) + } + + memoryAttribute, ok := attributes["memory"] + + if !ok { + diags.AddError( + "Attribute Missing", + `memory is missing from object`) + + return nil, diags + } + + memoryVal, ok := memoryAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`memory expected to be basetypes.Int64Value, was: %T`, memoryAttribute)) + } + + minGbAttribute, ok := attributes["min_gb"] + + if !ok { + diags.AddError( + "Attribute Missing", + `min_gb is missing from object`) + + return nil, diags + } + + minGbVal, ok := minGbAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`min_gb expected to be basetypes.Int64Value, was: %T`, minGbAttribute)) + } + + nodeTypeAttribute, ok := attributes["node_type"] + + if !ok { + diags.AddError( + "Attribute Missing", + `node_type is missing from object`) + + return nil, diags + } + + nodeTypeVal, ok := nodeTypeAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`node_type expected to be basetypes.StringValue, was: %T`, nodeTypeAttribute)) + } + + storageClassesAttribute, ok := attributes["storage_classes"] + + if !ok { + diags.AddError( + "Attribute Missing", + `storage_classes is missing from object`) + + return nil, diags + } + + storageClassesVal, ok := storageClassesAttribute.(basetypes.ListValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`storage_classes expected to be basetypes.ListValue, was: %T`, storageClassesAttribute)) + } + + if diags.HasError() { + return nil, diags + } + + return FlavorsValue{ + Cpu: cpuVal, + Description: descriptionVal, + Id: idVal, + MaxGb: maxGbVal, + Memory: memoryVal, + MinGb: minGbVal, + NodeType: nodeTypeVal, + StorageClasses: storageClassesVal, + state: attr.ValueStateKnown, + }, diags +} + +func NewFlavorsValueNull() FlavorsValue { + return FlavorsValue{ + state: attr.ValueStateNull, + } +} + +func NewFlavorsValueUnknown() FlavorsValue { + return FlavorsValue{ + state: attr.ValueStateUnknown, + } +} + +func NewFlavorsValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (FlavorsValue, diag.Diagnostics) { + var diags diag.Diagnostics + + // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521 + ctx := context.Background() + + for name, attributeType := range attributeTypes { + attribute, ok := attributes[name] + + if !ok { + diags.AddError( + "Missing FlavorsValue Attribute Value", + "While creating a FlavorsValue value, a missing attribute value was detected. "+ + "A FlavorsValue must contain values for all attributes, even if null or unknown. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("FlavorsValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()), + ) + + continue + } + + if !attributeType.Equal(attribute.Type(ctx)) { + diags.AddError( + "Invalid FlavorsValue Attribute Type", + "While creating a FlavorsValue value, an invalid attribute value was detected. "+ + "A FlavorsValue must use a matching attribute type for the value. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("FlavorsValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+ + fmt.Sprintf("FlavorsValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)), + ) + } + } + + for name := range attributes { + _, ok := attributeTypes[name] + + if !ok { + diags.AddError( + "Extra FlavorsValue Attribute Value", + "While creating a FlavorsValue value, an extra attribute value was detected. "+ + "A FlavorsValue must not contain values beyond the expected attribute types. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("Extra FlavorsValue Attribute Name: %s", name), + ) + } + } + + if diags.HasError() { + return NewFlavorsValueUnknown(), diags + } + + cpuAttribute, ok := attributes["cpu"] + + if !ok { + diags.AddError( + "Attribute Missing", + `cpu is missing from object`) + + return NewFlavorsValueUnknown(), diags + } + + cpuVal, ok := cpuAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`cpu expected to be basetypes.Int64Value, was: %T`, cpuAttribute)) + } + + descriptionAttribute, ok := attributes["description"] + + if !ok { + diags.AddError( + "Attribute Missing", + `description is missing from object`) + + return NewFlavorsValueUnknown(), diags + } + + descriptionVal, ok := descriptionAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`description expected to be basetypes.StringValue, was: %T`, descriptionAttribute)) + } + + idAttribute, ok := attributes["id"] + + if !ok { + diags.AddError( + "Attribute Missing", + `id is missing from object`) + + return NewFlavorsValueUnknown(), diags + } + + idVal, ok := idAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`id expected to be basetypes.StringValue, was: %T`, idAttribute)) + } + + maxGbAttribute, ok := attributes["max_gb"] + + if !ok { + diags.AddError( + "Attribute Missing", + `max_gb is missing from object`) + + return NewFlavorsValueUnknown(), diags + } + + maxGbVal, ok := maxGbAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`max_gb expected to be basetypes.Int64Value, was: %T`, maxGbAttribute)) + } + + memoryAttribute, ok := attributes["memory"] + + if !ok { + diags.AddError( + "Attribute Missing", + `memory is missing from object`) + + return NewFlavorsValueUnknown(), diags + } + + memoryVal, ok := memoryAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`memory expected to be basetypes.Int64Value, was: %T`, memoryAttribute)) + } + + minGbAttribute, ok := attributes["min_gb"] + + if !ok { + diags.AddError( + "Attribute Missing", + `min_gb is missing from object`) + + return NewFlavorsValueUnknown(), diags + } + + minGbVal, ok := minGbAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`min_gb expected to be basetypes.Int64Value, was: %T`, minGbAttribute)) + } + + nodeTypeAttribute, ok := attributes["node_type"] + + if !ok { + diags.AddError( + "Attribute Missing", + `node_type is missing from object`) + + return NewFlavorsValueUnknown(), diags + } + + nodeTypeVal, ok := nodeTypeAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`node_type expected to be basetypes.StringValue, was: %T`, nodeTypeAttribute)) + } + + storageClassesAttribute, ok := attributes["storage_classes"] + + if !ok { + diags.AddError( + "Attribute Missing", + `storage_classes is missing from object`) + + return NewFlavorsValueUnknown(), diags + } + + storageClassesVal, ok := storageClassesAttribute.(basetypes.ListValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`storage_classes expected to be basetypes.ListValue, was: %T`, storageClassesAttribute)) + } + + if diags.HasError() { + return NewFlavorsValueUnknown(), diags + } + + return FlavorsValue{ + Cpu: cpuVal, + Description: descriptionVal, + Id: idVal, + MaxGb: maxGbVal, + Memory: memoryVal, + MinGb: minGbVal, + NodeType: nodeTypeVal, + StorageClasses: storageClassesVal, + state: attr.ValueStateKnown, + }, diags +} + +func NewFlavorsValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) FlavorsValue { + object, diags := NewFlavorsValue(attributeTypes, attributes) + + if diags.HasError() { + // This could potentially be added to the diag package. + diagsStrings := make([]string, 0, len(diags)) + + for _, diagnostic := range diags { + diagsStrings = append(diagsStrings, fmt.Sprintf( + "%s | %s | %s", + diagnostic.Severity(), + diagnostic.Summary(), + diagnostic.Detail())) + } + + panic("NewFlavorsValueMust received error(s): " + strings.Join(diagsStrings, "\n")) + } + + return object +} + +func (t FlavorsType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) { + if in.Type() == nil { + return NewFlavorsValueNull(), nil + } + + if !in.Type().Equal(t.TerraformType(ctx)) { + return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type()) + } + + if !in.IsKnown() { + return NewFlavorsValueUnknown(), nil + } + + if in.IsNull() { + return NewFlavorsValueNull(), nil + } + + attributes := map[string]attr.Value{} + + val := map[string]tftypes.Value{} + + err := in.As(&val) + + if err != nil { + return nil, err + } + + for k, v := range val { + a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v) + + if err != nil { + return nil, err + } + + attributes[k] = a + } + + return NewFlavorsValueMust(FlavorsValue{}.AttributeTypes(ctx), attributes), nil +} + +func (t FlavorsType) ValueType(ctx context.Context) attr.Value { + return FlavorsValue{} +} + +var _ basetypes.ObjectValuable = FlavorsValue{} + +type FlavorsValue struct { + Cpu basetypes.Int64Value `tfsdk:"cpu"` + Description basetypes.StringValue `tfsdk:"description"` + Id basetypes.StringValue `tfsdk:"id"` + MaxGb basetypes.Int64Value `tfsdk:"max_gb"` + Memory basetypes.Int64Value `tfsdk:"memory"` + MinGb basetypes.Int64Value `tfsdk:"min_gb"` + NodeType basetypes.StringValue `tfsdk:"node_type"` + StorageClasses basetypes.ListValue `tfsdk:"storage_classes"` + state attr.ValueState +} + +func (v FlavorsValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) { + attrTypes := make(map[string]tftypes.Type, 8) + + var val tftypes.Value + var err error + + attrTypes["cpu"] = basetypes.Int64Type{}.TerraformType(ctx) + attrTypes["description"] = basetypes.StringType{}.TerraformType(ctx) + attrTypes["id"] = basetypes.StringType{}.TerraformType(ctx) + attrTypes["max_gb"] = basetypes.Int64Type{}.TerraformType(ctx) + attrTypes["memory"] = basetypes.Int64Type{}.TerraformType(ctx) + attrTypes["min_gb"] = basetypes.Int64Type{}.TerraformType(ctx) + attrTypes["node_type"] = basetypes.StringType{}.TerraformType(ctx) + attrTypes["storage_classes"] = basetypes.ListType{ + ElemType: StorageClassesValue{}.Type(ctx), + }.TerraformType(ctx) + + objectType := tftypes.Object{AttributeTypes: attrTypes} + + switch v.state { + case attr.ValueStateKnown: + vals := make(map[string]tftypes.Value, 8) + + val, err = v.Cpu.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["cpu"] = val + + val, err = v.Description.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["description"] = val + + val, err = v.Id.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["id"] = val + + val, err = v.MaxGb.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["max_gb"] = val + + val, err = v.Memory.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["memory"] = val + + val, err = v.MinGb.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["min_gb"] = val + + val, err = v.NodeType.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["node_type"] = val + + val, err = v.StorageClasses.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["storage_classes"] = val + + if err := tftypes.ValidateValue(objectType, vals); err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + return tftypes.NewValue(objectType, vals), nil + case attr.ValueStateNull: + return tftypes.NewValue(objectType, nil), nil + case attr.ValueStateUnknown: + return tftypes.NewValue(objectType, tftypes.UnknownValue), nil + default: + panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state)) + } +} + +func (v FlavorsValue) IsNull() bool { + return v.state == attr.ValueStateNull +} + +func (v FlavorsValue) IsUnknown() bool { + return v.state == attr.ValueStateUnknown +} + +func (v FlavorsValue) String() string { + return "FlavorsValue" +} + +func (v FlavorsValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) { + var diags diag.Diagnostics + + storageClasses := types.ListValueMust( + StorageClassesType{ + basetypes.ObjectType{ + AttrTypes: StorageClassesValue{}.AttributeTypes(ctx), + }, + }, + v.StorageClasses.Elements(), + ) + + if v.StorageClasses.IsNull() { + storageClasses = types.ListNull( + StorageClassesType{ + basetypes.ObjectType{ + AttrTypes: StorageClassesValue{}.AttributeTypes(ctx), + }, + }, + ) + } + + if v.StorageClasses.IsUnknown() { + storageClasses = types.ListUnknown( + StorageClassesType{ + basetypes.ObjectType{ + AttrTypes: StorageClassesValue{}.AttributeTypes(ctx), + }, + }, + ) + } + + attributeTypes := map[string]attr.Type{ + "cpu": basetypes.Int64Type{}, + "description": basetypes.StringType{}, + "id": basetypes.StringType{}, + "max_gb": basetypes.Int64Type{}, + "memory": basetypes.Int64Type{}, + "min_gb": basetypes.Int64Type{}, + "node_type": basetypes.StringType{}, + "storage_classes": basetypes.ListType{ + ElemType: StorageClassesValue{}.Type(ctx), + }, + } + + if v.IsNull() { + return types.ObjectNull(attributeTypes), diags + } + + if v.IsUnknown() { + return types.ObjectUnknown(attributeTypes), diags + } + + objVal, diags := types.ObjectValue( + attributeTypes, + map[string]attr.Value{ + "cpu": v.Cpu, + "description": v.Description, + "id": v.Id, + "max_gb": v.MaxGb, + "memory": v.Memory, + "min_gb": v.MinGb, + "node_type": v.NodeType, + "storage_classes": storageClasses, + }) + + return objVal, diags +} + +func (v FlavorsValue) Equal(o attr.Value) bool { + other, ok := o.(FlavorsValue) + + if !ok { + return false + } + + if v.state != other.state { + return false + } + + if v.state != attr.ValueStateKnown { + return true + } + + if !v.Cpu.Equal(other.Cpu) { + return false + } + + if !v.Description.Equal(other.Description) { + return false + } + + if !v.Id.Equal(other.Id) { + return false + } + + if !v.MaxGb.Equal(other.MaxGb) { + return false + } + + if !v.Memory.Equal(other.Memory) { + return false + } + + if !v.MinGb.Equal(other.MinGb) { + return false + } + + if !v.NodeType.Equal(other.NodeType) { + return false + } + + if !v.StorageClasses.Equal(other.StorageClasses) { + return false + } + + return true +} + +func (v FlavorsValue) Type(ctx context.Context) attr.Type { + return FlavorsType{ + basetypes.ObjectType{ + AttrTypes: v.AttributeTypes(ctx), + }, + } +} + +func (v FlavorsValue) AttributeTypes(ctx context.Context) map[string]attr.Type { + return map[string]attr.Type{ + "cpu": basetypes.Int64Type{}, + "description": basetypes.StringType{}, + "id": basetypes.StringType{}, + "max_gb": basetypes.Int64Type{}, + "memory": basetypes.Int64Type{}, + "min_gb": basetypes.Int64Type{}, + "node_type": basetypes.StringType{}, + "storage_classes": basetypes.ListType{ + ElemType: StorageClassesValue{}.Type(ctx), + }, + } +} + +var _ basetypes.ObjectTypable = StorageClassesType{} + +type StorageClassesType struct { + basetypes.ObjectType +} + +func (t StorageClassesType) Equal(o attr.Type) bool { + other, ok := o.(StorageClassesType) + + if !ok { + return false + } + + return t.ObjectType.Equal(other.ObjectType) +} + +func (t StorageClassesType) String() string { + return "StorageClassesType" +} + +func (t StorageClassesType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) { + var diags diag.Diagnostics + + attributes := in.Attributes() + + classAttribute, ok := attributes["class"] + + if !ok { + diags.AddError( + "Attribute Missing", + `class is missing from object`) + + return nil, diags + } + + classVal, ok := classAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`class expected to be basetypes.StringValue, was: %T`, classAttribute)) + } + + maxIoPerSecAttribute, ok := attributes["max_io_per_sec"] + + if !ok { + diags.AddError( + "Attribute Missing", + `max_io_per_sec is missing from object`) + + return nil, diags + } + + maxIoPerSecVal, ok := maxIoPerSecAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`max_io_per_sec expected to be basetypes.Int64Value, was: %T`, maxIoPerSecAttribute)) + } + + maxThroughInMbAttribute, ok := attributes["max_through_in_mb"] + + if !ok { + diags.AddError( + "Attribute Missing", + `max_through_in_mb is missing from object`) + + return nil, diags + } + + maxThroughInMbVal, ok := maxThroughInMbAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`max_through_in_mb expected to be basetypes.Int64Value, was: %T`, maxThroughInMbAttribute)) + } + + if diags.HasError() { + return nil, diags + } + + return StorageClassesValue{ + Class: classVal, + MaxIoPerSec: maxIoPerSecVal, + MaxThroughInMb: maxThroughInMbVal, + state: attr.ValueStateKnown, + }, diags +} + +func NewStorageClassesValueNull() StorageClassesValue { + return StorageClassesValue{ + state: attr.ValueStateNull, + } +} + +func NewStorageClassesValueUnknown() StorageClassesValue { + return StorageClassesValue{ + state: attr.ValueStateUnknown, + } +} + +func NewStorageClassesValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (StorageClassesValue, diag.Diagnostics) { + var diags diag.Diagnostics + + // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521 + ctx := context.Background() + + for name, attributeType := range attributeTypes { + attribute, ok := attributes[name] + + if !ok { + diags.AddError( + "Missing StorageClassesValue Attribute Value", + "While creating a StorageClassesValue value, a missing attribute value was detected. "+ + "A StorageClassesValue must contain values for all attributes, even if null or unknown. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("StorageClassesValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()), + ) + + continue + } + + if !attributeType.Equal(attribute.Type(ctx)) { + diags.AddError( + "Invalid StorageClassesValue Attribute Type", + "While creating a StorageClassesValue value, an invalid attribute value was detected. "+ + "A StorageClassesValue must use a matching attribute type for the value. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("StorageClassesValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+ + fmt.Sprintf("StorageClassesValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)), + ) + } + } + + for name := range attributes { + _, ok := attributeTypes[name] + + if !ok { + diags.AddError( + "Extra StorageClassesValue Attribute Value", + "While creating a StorageClassesValue value, an extra attribute value was detected. "+ + "A StorageClassesValue must not contain values beyond the expected attribute types. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("Extra StorageClassesValue Attribute Name: %s", name), + ) + } + } + + if diags.HasError() { + return NewStorageClassesValueUnknown(), diags + } + + classAttribute, ok := attributes["class"] + + if !ok { + diags.AddError( + "Attribute Missing", + `class is missing from object`) + + return NewStorageClassesValueUnknown(), diags + } + + classVal, ok := classAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`class expected to be basetypes.StringValue, was: %T`, classAttribute)) + } + + maxIoPerSecAttribute, ok := attributes["max_io_per_sec"] + + if !ok { + diags.AddError( + "Attribute Missing", + `max_io_per_sec is missing from object`) + + return NewStorageClassesValueUnknown(), diags + } + + maxIoPerSecVal, ok := maxIoPerSecAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`max_io_per_sec expected to be basetypes.Int64Value, was: %T`, maxIoPerSecAttribute)) + } + + maxThroughInMbAttribute, ok := attributes["max_through_in_mb"] + + if !ok { + diags.AddError( + "Attribute Missing", + `max_through_in_mb is missing from object`) + + return NewStorageClassesValueUnknown(), diags + } + + maxThroughInMbVal, ok := maxThroughInMbAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`max_through_in_mb expected to be basetypes.Int64Value, was: %T`, maxThroughInMbAttribute)) + } + + if diags.HasError() { + return NewStorageClassesValueUnknown(), diags + } + + return StorageClassesValue{ + Class: classVal, + MaxIoPerSec: maxIoPerSecVal, + MaxThroughInMb: maxThroughInMbVal, + state: attr.ValueStateKnown, + }, diags +} + +func NewStorageClassesValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) StorageClassesValue { + object, diags := NewStorageClassesValue(attributeTypes, attributes) + + if diags.HasError() { + // This could potentially be added to the diag package. + diagsStrings := make([]string, 0, len(diags)) + + for _, diagnostic := range diags { + diagsStrings = append(diagsStrings, fmt.Sprintf( + "%s | %s | %s", + diagnostic.Severity(), + diagnostic.Summary(), + diagnostic.Detail())) + } + + panic("NewStorageClassesValueMust received error(s): " + strings.Join(diagsStrings, "\n")) + } + + return object +} + +func (t StorageClassesType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) { + if in.Type() == nil { + return NewStorageClassesValueNull(), nil + } + + if !in.Type().Equal(t.TerraformType(ctx)) { + return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type()) + } + + if !in.IsKnown() { + return NewStorageClassesValueUnknown(), nil + } + + if in.IsNull() { + return NewStorageClassesValueNull(), nil + } + + attributes := map[string]attr.Value{} + + val := map[string]tftypes.Value{} + + err := in.As(&val) + + if err != nil { + return nil, err + } + + for k, v := range val { + a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v) + + if err != nil { + return nil, err + } + + attributes[k] = a + } + + return NewStorageClassesValueMust(StorageClassesValue{}.AttributeTypes(ctx), attributes), nil +} + +func (t StorageClassesType) ValueType(ctx context.Context) attr.Value { + return StorageClassesValue{} +} + +var _ basetypes.ObjectValuable = StorageClassesValue{} + +type StorageClassesValue struct { + Class basetypes.StringValue `tfsdk:"class"` + MaxIoPerSec basetypes.Int64Value `tfsdk:"max_io_per_sec"` + MaxThroughInMb basetypes.Int64Value `tfsdk:"max_through_in_mb"` + state attr.ValueState +} + +func (v StorageClassesValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) { + attrTypes := make(map[string]tftypes.Type, 3) + + var val tftypes.Value + var err error + + attrTypes["class"] = basetypes.StringType{}.TerraformType(ctx) + attrTypes["max_io_per_sec"] = basetypes.Int64Type{}.TerraformType(ctx) + attrTypes["max_through_in_mb"] = basetypes.Int64Type{}.TerraformType(ctx) + + objectType := tftypes.Object{AttributeTypes: attrTypes} + + switch v.state { + case attr.ValueStateKnown: + vals := make(map[string]tftypes.Value, 3) + + val, err = v.Class.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["class"] = val + + val, err = v.MaxIoPerSec.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["max_io_per_sec"] = val + + val, err = v.MaxThroughInMb.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["max_through_in_mb"] = val + + if err := tftypes.ValidateValue(objectType, vals); err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + return tftypes.NewValue(objectType, vals), nil + case attr.ValueStateNull: + return tftypes.NewValue(objectType, nil), nil + case attr.ValueStateUnknown: + return tftypes.NewValue(objectType, tftypes.UnknownValue), nil + default: + panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state)) + } +} + +func (v StorageClassesValue) IsNull() bool { + return v.state == attr.ValueStateNull +} + +func (v StorageClassesValue) IsUnknown() bool { + return v.state == attr.ValueStateUnknown +} + +func (v StorageClassesValue) String() string { + return "StorageClassesValue" +} + +func (v StorageClassesValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) { + var diags diag.Diagnostics + + attributeTypes := map[string]attr.Type{ + "class": basetypes.StringType{}, + "max_io_per_sec": basetypes.Int64Type{}, + "max_through_in_mb": basetypes.Int64Type{}, + } + + if v.IsNull() { + return types.ObjectNull(attributeTypes), diags + } + + if v.IsUnknown() { + return types.ObjectUnknown(attributeTypes), diags + } + + objVal, diags := types.ObjectValue( + attributeTypes, + map[string]attr.Value{ + "class": v.Class, + "max_io_per_sec": v.MaxIoPerSec, + "max_through_in_mb": v.MaxThroughInMb, + }) + + return objVal, diags +} + +func (v StorageClassesValue) Equal(o attr.Value) bool { + other, ok := o.(StorageClassesValue) + + if !ok { + return false + } + + if v.state != other.state { + return false + } + + if v.state != attr.ValueStateKnown { + return true + } + + if !v.Class.Equal(other.Class) { + return false + } + + if !v.MaxIoPerSec.Equal(other.MaxIoPerSec) { + return false + } + + if !v.MaxThroughInMb.Equal(other.MaxThroughInMb) { + return false + } + + return true +} + +func (v StorageClassesValue) Type(ctx context.Context) attr.Type { + return StorageClassesType{ + basetypes.ObjectType{ + AttrTypes: v.AttributeTypes(ctx), + }, + } +} + +func (v StorageClassesValue) AttributeTypes(ctx context.Context) map[string]attr.Type { + return map[string]attr.Type{ + "class": basetypes.StringType{}, + "max_io_per_sec": basetypes.Int64Type{}, + "max_through_in_mb": basetypes.Int64Type{}, + } +} + +var _ basetypes.ObjectTypable = PaginationType{} + +type PaginationType struct { + basetypes.ObjectType +} + +func (t PaginationType) Equal(o attr.Type) bool { + other, ok := o.(PaginationType) + + if !ok { + return false + } + + return t.ObjectType.Equal(other.ObjectType) +} + +func (t PaginationType) String() string { + return "PaginationType" +} + +func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) { + var diags diag.Diagnostics + + attributes := in.Attributes() + + pageAttribute, ok := attributes["page"] + + if !ok { + diags.AddError( + "Attribute Missing", + `page is missing from object`) + + return nil, diags + } + + pageVal, ok := pageAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute)) + } + + sizeAttribute, ok := attributes["size"] + + if !ok { + diags.AddError( + "Attribute Missing", + `size is missing from object`) + + return nil, diags + } + + sizeVal, ok := sizeAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute)) + } + + sortAttribute, ok := attributes["sort"] + + if !ok { + diags.AddError( + "Attribute Missing", + `sort is missing from object`) + + return nil, diags + } + + sortVal, ok := sortAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`sort expected to be basetypes.StringValue, was: %T`, sortAttribute)) + } + + totalPagesAttribute, ok := attributes["total_pages"] + + if !ok { + diags.AddError( + "Attribute Missing", + `total_pages is missing from object`) + + return nil, diags + } + + totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute)) + } + + totalRowsAttribute, ok := attributes["total_rows"] + + if !ok { + diags.AddError( + "Attribute Missing", + `total_rows is missing from object`) + + return nil, diags + } + + totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute)) + } + + if diags.HasError() { + return nil, diags + } + + return PaginationValue{ + Page: pageVal, + Size: sizeVal, + Sort: sortVal, + TotalPages: totalPagesVal, + TotalRows: totalRowsVal, + state: attr.ValueStateKnown, + }, diags +} + +func NewPaginationValueNull() PaginationValue { + return PaginationValue{ + state: attr.ValueStateNull, + } +} + +func NewPaginationValueUnknown() PaginationValue { + return PaginationValue{ + state: attr.ValueStateUnknown, + } +} + +func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (PaginationValue, diag.Diagnostics) { + var diags diag.Diagnostics + + // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521 + ctx := context.Background() + + for name, attributeType := range attributeTypes { + attribute, ok := attributes[name] + + if !ok { + diags.AddError( + "Missing PaginationValue Attribute Value", + "While creating a PaginationValue value, a missing attribute value was detected. "+ + "A PaginationValue must contain values for all attributes, even if null or unknown. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("PaginationValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()), + ) + + continue + } + + if !attributeType.Equal(attribute.Type(ctx)) { + diags.AddError( + "Invalid PaginationValue Attribute Type", + "While creating a PaginationValue value, an invalid attribute value was detected. "+ + "A PaginationValue must use a matching attribute type for the value. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("PaginationValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+ + fmt.Sprintf("PaginationValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)), + ) + } + } + + for name := range attributes { + _, ok := attributeTypes[name] + + if !ok { + diags.AddError( + "Extra PaginationValue Attribute Value", + "While creating a PaginationValue value, an extra attribute value was detected. "+ + "A PaginationValue must not contain values beyond the expected attribute types. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("Extra PaginationValue Attribute Name: %s", name), + ) + } + } + + if diags.HasError() { + return NewPaginationValueUnknown(), diags + } + + pageAttribute, ok := attributes["page"] + + if !ok { + diags.AddError( + "Attribute Missing", + `page is missing from object`) + + return NewPaginationValueUnknown(), diags + } + + pageVal, ok := pageAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute)) + } + + sizeAttribute, ok := attributes["size"] + + if !ok { + diags.AddError( + "Attribute Missing", + `size is missing from object`) + + return NewPaginationValueUnknown(), diags + } + + sizeVal, ok := sizeAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute)) + } + + sortAttribute, ok := attributes["sort"] + + if !ok { + diags.AddError( + "Attribute Missing", + `sort is missing from object`) + + return NewPaginationValueUnknown(), diags + } + + sortVal, ok := sortAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`sort expected to be basetypes.StringValue, was: %T`, sortAttribute)) + } + + totalPagesAttribute, ok := attributes["total_pages"] + + if !ok { + diags.AddError( + "Attribute Missing", + `total_pages is missing from object`) + + return NewPaginationValueUnknown(), diags + } + + totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute)) + } + + totalRowsAttribute, ok := attributes["total_rows"] + + if !ok { + diags.AddError( + "Attribute Missing", + `total_rows is missing from object`) + + return NewPaginationValueUnknown(), diags + } + + totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute)) + } + + if diags.HasError() { + return NewPaginationValueUnknown(), diags + } + + return PaginationValue{ + Page: pageVal, + Size: sizeVal, + Sort: sortVal, + TotalPages: totalPagesVal, + TotalRows: totalRowsVal, + state: attr.ValueStateKnown, + }, diags +} + +func NewPaginationValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) PaginationValue { + object, diags := NewPaginationValue(attributeTypes, attributes) + + if diags.HasError() { + // This could potentially be added to the diag package. + diagsStrings := make([]string, 0, len(diags)) + + for _, diagnostic := range diags { + diagsStrings = append(diagsStrings, fmt.Sprintf( + "%s | %s | %s", + diagnostic.Severity(), + diagnostic.Summary(), + diagnostic.Detail())) + } + + panic("NewPaginationValueMust received error(s): " + strings.Join(diagsStrings, "\n")) + } + + return object +} + +func (t PaginationType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) { + if in.Type() == nil { + return NewPaginationValueNull(), nil + } + + if !in.Type().Equal(t.TerraformType(ctx)) { + return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type()) + } + + if !in.IsKnown() { + return NewPaginationValueUnknown(), nil + } + + if in.IsNull() { + return NewPaginationValueNull(), nil + } + + attributes := map[string]attr.Value{} + + val := map[string]tftypes.Value{} + + err := in.As(&val) + + if err != nil { + return nil, err + } + + for k, v := range val { + a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v) + + if err != nil { + return nil, err + } + + attributes[k] = a + } + + return NewPaginationValueMust(PaginationValue{}.AttributeTypes(ctx), attributes), nil +} + +func (t PaginationType) ValueType(ctx context.Context) attr.Value { + return PaginationValue{} +} + +var _ basetypes.ObjectValuable = PaginationValue{} + +type PaginationValue struct { + Page basetypes.Int64Value `tfsdk:"page"` + Size basetypes.Int64Value `tfsdk:"size"` + Sort basetypes.StringValue `tfsdk:"sort"` + TotalPages basetypes.Int64Value `tfsdk:"total_pages"` + TotalRows basetypes.Int64Value `tfsdk:"total_rows"` + state attr.ValueState +} + +func (v PaginationValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) { + attrTypes := make(map[string]tftypes.Type, 5) + + var val tftypes.Value + var err error + + attrTypes["page"] = basetypes.Int64Type{}.TerraformType(ctx) + attrTypes["size"] = basetypes.Int64Type{}.TerraformType(ctx) + attrTypes["sort"] = basetypes.StringType{}.TerraformType(ctx) + attrTypes["total_pages"] = basetypes.Int64Type{}.TerraformType(ctx) + attrTypes["total_rows"] = basetypes.Int64Type{}.TerraformType(ctx) + + objectType := tftypes.Object{AttributeTypes: attrTypes} + + switch v.state { + case attr.ValueStateKnown: + vals := make(map[string]tftypes.Value, 5) + + val, err = v.Page.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["page"] = val + + val, err = v.Size.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["size"] = val + + val, err = v.Sort.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["sort"] = val + + val, err = v.TotalPages.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["total_pages"] = val + + val, err = v.TotalRows.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["total_rows"] = val + + if err := tftypes.ValidateValue(objectType, vals); err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + return tftypes.NewValue(objectType, vals), nil + case attr.ValueStateNull: + return tftypes.NewValue(objectType, nil), nil + case attr.ValueStateUnknown: + return tftypes.NewValue(objectType, tftypes.UnknownValue), nil + default: + panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state)) + } +} + +func (v PaginationValue) IsNull() bool { + return v.state == attr.ValueStateNull +} + +func (v PaginationValue) IsUnknown() bool { + return v.state == attr.ValueStateUnknown +} + +func (v PaginationValue) String() string { + return "PaginationValue" +} + +func (v PaginationValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) { + var diags diag.Diagnostics + + attributeTypes := map[string]attr.Type{ + "page": basetypes.Int64Type{}, + "size": basetypes.Int64Type{}, + "sort": basetypes.StringType{}, + "total_pages": basetypes.Int64Type{}, + "total_rows": basetypes.Int64Type{}, + } + + if v.IsNull() { + return types.ObjectNull(attributeTypes), diags + } + + if v.IsUnknown() { + return types.ObjectUnknown(attributeTypes), diags + } + + objVal, diags := types.ObjectValue( + attributeTypes, + map[string]attr.Value{ + "page": v.Page, + "size": v.Size, + "sort": v.Sort, + "total_pages": v.TotalPages, + "total_rows": v.TotalRows, + }) + + return objVal, diags +} + +func (v PaginationValue) Equal(o attr.Value) bool { + other, ok := o.(PaginationValue) + + if !ok { + return false + } + + if v.state != other.state { + return false + } + + if v.state != attr.ValueStateKnown { + return true + } + + if !v.Page.Equal(other.Page) { + return false + } + + if !v.Size.Equal(other.Size) { + return false + } + + if !v.Sort.Equal(other.Sort) { + return false + } + + if !v.TotalPages.Equal(other.TotalPages) { + return false + } + + if !v.TotalRows.Equal(other.TotalRows) { + return false + } + + return true +} + +func (v PaginationValue) Type(ctx context.Context) attr.Type { + return PaginationType{ + basetypes.ObjectType{ + AttrTypes: v.AttributeTypes(ctx), + }, + } +} + +func (v PaginationValue) AttributeTypes(ctx context.Context) map[string]attr.Type { + return map[string]attr.Type{ + "page": basetypes.Int64Type{}, + "size": basetypes.Int64Type{}, + "sort": basetypes.StringType{}, + "total_pages": basetypes.Int64Type{}, + "total_rows": basetypes.Int64Type{}, + } +} diff --git a/stackit/internal/services/sqlserverflexbeta/instance/datasource.go b/stackit/internal/services/sqlserverflexbeta/instance/datasource.go new file mode 100644 index 00000000..842a4cfd --- /dev/null +++ b/stackit/internal/services/sqlserverflexbeta/instance/datasource.go @@ -0,0 +1,134 @@ +package sqlserverflexbeta + +import ( + "context" + "fmt" + "net/http" + + "github.com/hashicorp/terraform-plugin-framework/datasource" + "github.com/hashicorp/terraform-plugin-log/tflog" + "github.com/stackitcloud/stackit-sdk-go/core/config" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" + + sqlserverflexbetaPkg "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta" + + sqlserverflexbetaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/instance/datasources_gen" +) + +var _ datasource.DataSource = (*instanceDataSource)(nil) + +const errorPrefix = "[Sqlserverflexbeta - Instance]" + +func NewInstanceDataSource() datasource.DataSource { + return &instanceDataSource{} +} + +type instanceDataSource struct { + client *sqlserverflexbetaPkg.APIClient + providerData core.ProviderData +} + +func (d *instanceDataSource) Metadata(_ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) { + resp.TypeName = req.ProviderTypeName + "_sqlserverflexbeta_instance" +} + +func (d *instanceDataSource) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) { + resp.Schema = sqlserverflexbetaGen.InstanceDataSourceSchema(ctx) +} + +// Configure adds the provider configured client to the data source. +func (d *instanceDataSource) Configure( + ctx context.Context, + req datasource.ConfigureRequest, + resp *datasource.ConfigureResponse, +) { + var ok bool + d.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics) + if !ok { + return + } + + apiClientConfigOptions := []config.ConfigurationOption{ + config.WithCustomAuth(d.providerData.RoundTripper), + utils.UserAgentConfigOption(d.providerData.Version), + } + if d.providerData.SQLServerFlexCustomEndpoint != "" { + apiClientConfigOptions = append( + apiClientConfigOptions, + config.WithEndpoint(d.providerData.SQLServerFlexCustomEndpoint), + ) + } else { + apiClientConfigOptions = append( + apiClientConfigOptions, + config.WithRegion(d.providerData.GetRegion()), + ) + } + apiClient, err := sqlserverflexbetaPkg.NewAPIClient(apiClientConfigOptions...) + if err != nil { + resp.Diagnostics.AddError( + "Error configuring API client", + fmt.Sprintf( + "Configuring client: %v. This is an error related to the provider configuration, not to the resource configuration", + err, + ), + ) + return + } + d.client = apiClient + tflog.Info(ctx, fmt.Sprintf("%s client configured", errorPrefix)) +} + +func (d *instanceDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { + var data sqlserverflexbetaGen.InstanceModel + + // Read Terraform configuration data into the model + resp.Diagnostics.Append(req.Config.Get(ctx, &data)...) + + if resp.Diagnostics.HasError() { + return + } + + ctx = core.InitProviderContext(ctx) + + projectId := data.ProjectId.ValueString() + region := d.providerData.GetRegionWithOverride(data.Region) + instanceId := data.InstanceId.ValueString() + + ctx = tflog.SetField(ctx, "project_id", projectId) + ctx = tflog.SetField(ctx, "region", region) + ctx = tflog.SetField(ctx, "instance_id", instanceId) + + instanceResp, err := d.client.GetInstanceRequest(ctx, projectId, region, instanceId).Execute() + if err != nil { + utils.LogError( + ctx, + &resp.Diagnostics, + err, + "Reading instance", + fmt.Sprintf("instance with ID %q does not exist in project %q.", instanceId, projectId), + map[int]string{ + http.StatusForbidden: fmt.Sprintf("Project with ID %q not found or forbidden access", projectId), + }, + ) + resp.State.RemoveResource(ctx) + return + } + + ctx = core.LogResponse(ctx) + + err = mapDataResponseToModel(ctx, instanceResp, &data, resp.Diagnostics) + if err != nil { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + fmt.Sprintf("%s Read", errorPrefix), + fmt.Sprintf("Processing API payload: %v", err), + ) + return + } + + // Save data into Terraform state + resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) +} diff --git a/stackit/internal/services/sqlserverflexbeta/instance/datasources_gen/instance_data_source_gen.go b/stackit/internal/services/sqlserverflexbeta/instance/datasources_gen/instance_data_source_gen.go new file mode 100644 index 00000000..87476c3c --- /dev/null +++ b/stackit/internal/services/sqlserverflexbeta/instance/datasources_gen/instance_data_source_gen.go @@ -0,0 +1,1579 @@ +// Code generated by terraform-plugin-framework-generator DO NOT EDIT. + +package sqlserverflexbeta + +import ( + "context" + "fmt" + "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator" + "github.com/hashicorp/terraform-plugin-framework/attr" + "github.com/hashicorp/terraform-plugin-framework/diag" + "github.com/hashicorp/terraform-plugin-framework/schema/validator" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/hashicorp/terraform-plugin-framework/types/basetypes" + "github.com/hashicorp/terraform-plugin-go/tftypes" + "strings" + + "github.com/hashicorp/terraform-plugin-framework/datasource/schema" +) + +func InstanceDataSourceSchema(ctx context.Context) schema.Schema { + return schema.Schema{ + Attributes: map[string]schema.Attribute{ + "backup_schedule": schema.StringAttribute{ + Computed: true, + Description: "The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule.", + MarkdownDescription: "The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule.", + }, + "edition": schema.StringAttribute{ + Computed: true, + Description: "Edition of the MSSQL server instance", + MarkdownDescription: "Edition of the MSSQL server instance", + }, + "encryption": schema.SingleNestedAttribute{ + Attributes: map[string]schema.Attribute{ + "kek_key_id": schema.StringAttribute{ + Computed: true, + Description: "The key identifier", + MarkdownDescription: "The key identifier", + }, + "kek_key_ring_id": schema.StringAttribute{ + Computed: true, + Description: "The keyring identifier", + MarkdownDescription: "The keyring identifier", + }, + "kek_key_version": schema.StringAttribute{ + Computed: true, + Description: "The key version", + MarkdownDescription: "The key version", + }, + "service_account": schema.StringAttribute{ + Computed: true, + }, + }, + CustomType: EncryptionType{ + ObjectType: types.ObjectType{ + AttrTypes: EncryptionValue{}.AttributeTypes(ctx), + }, + }, + Computed: true, + Description: "this defines which key to use for storage encryption", + MarkdownDescription: "this defines which key to use for storage encryption", + }, + "flavor_id": schema.StringAttribute{ + Computed: true, + Description: "The id of the instance flavor.", + MarkdownDescription: "The id of the instance flavor.", + }, + "id": schema.StringAttribute{ + Computed: true, + Description: "The ID of the instance.", + MarkdownDescription: "The ID of the instance.", + }, + "instance_id": schema.StringAttribute{ + Required: true, + Description: "The ID of the instance.", + MarkdownDescription: "The ID of the instance.", + }, + "is_deletable": schema.BoolAttribute{ + Computed: true, + Description: "Whether the instance can be deleted or not.", + MarkdownDescription: "Whether the instance can be deleted or not.", + }, + "name": schema.StringAttribute{ + Computed: true, + Description: "The name of the instance.", + MarkdownDescription: "The name of the instance.", + }, + "network": schema.SingleNestedAttribute{ + Attributes: map[string]schema.Attribute{ + "access_scope": schema.StringAttribute{ + Computed: true, + Description: "The network access scope of the instance\n\n⚠️ **Note:** This feature is in private preview. Supplying this object is only permitted for enabled accounts. If your account does not have access, the request will be rejected.\n", + MarkdownDescription: "The network access scope of the instance\n\n⚠️ **Note:** This feature is in private preview. Supplying this object is only permitted for enabled accounts. If your account does not have access, the request will be rejected.\n", + }, + "acl": schema.ListAttribute{ + ElementType: types.StringType, + Computed: true, + Description: "List of IPV4 cidr.", + MarkdownDescription: "List of IPV4 cidr.", + }, + "instance_address": schema.StringAttribute{ + Computed: true, + }, + "router_address": schema.StringAttribute{ + Computed: true, + }, + }, + CustomType: NetworkType{ + ObjectType: types.ObjectType{ + AttrTypes: NetworkValue{}.AttributeTypes(ctx), + }, + }, + Computed: true, + Description: "The access configuration of the instance", + MarkdownDescription: "The access configuration of the instance", + }, + "project_id": schema.StringAttribute{ + Required: true, + Description: "The STACKIT project ID.", + MarkdownDescription: "The STACKIT project ID.", + }, + "region": schema.StringAttribute{ + Required: true, + Description: "The region which should be addressed", + MarkdownDescription: "The region which should be addressed", + Validators: []validator.String{ + stringvalidator.OneOf( + "eu01", + ), + }, + }, + "replicas": schema.Int64Attribute{ + Computed: true, + Description: "How many replicas the instance should have.", + MarkdownDescription: "How many replicas the instance should have.", + }, + "retention_days": schema.Int64Attribute{ + Computed: true, + Description: "The days for how long the backup files should be stored before cleaned up. 30 to 365", + MarkdownDescription: "The days for how long the backup files should be stored before cleaned up. 30 to 365", + }, + "status": schema.StringAttribute{ + Computed: true, + }, + "storage": schema.SingleNestedAttribute{ + Attributes: map[string]schema.Attribute{ + "class": schema.StringAttribute{ + Computed: true, + Description: "The storage class for the storage.", + MarkdownDescription: "The storage class for the storage.", + }, + "size": schema.Int64Attribute{ + Computed: true, + Description: "The storage size in Gigabytes.", + MarkdownDescription: "The storage size in Gigabytes.", + }, + }, + CustomType: StorageType{ + ObjectType: types.ObjectType{ + AttrTypes: StorageValue{}.AttributeTypes(ctx), + }, + }, + Computed: true, + Description: "The object containing information about the storage size and class.", + MarkdownDescription: "The object containing information about the storage size and class.", + }, + "version": schema.StringAttribute{ + Computed: true, + Description: "The sqlserver version used for the instance.", + MarkdownDescription: "The sqlserver version used for the instance.", + }, + }, + } +} + +type InstanceModel struct { + BackupSchedule types.String `tfsdk:"backup_schedule"` + Edition types.String `tfsdk:"edition"` + Encryption EncryptionValue `tfsdk:"encryption"` + FlavorId types.String `tfsdk:"flavor_id"` + Id types.String `tfsdk:"id"` + InstanceId types.String `tfsdk:"instance_id"` + IsDeletable types.Bool `tfsdk:"is_deletable"` + Name types.String `tfsdk:"name"` + Network NetworkValue `tfsdk:"network"` + ProjectId types.String `tfsdk:"project_id"` + Region types.String `tfsdk:"region"` + Replicas types.Int64 `tfsdk:"replicas"` + RetentionDays types.Int64 `tfsdk:"retention_days"` + Status types.String `tfsdk:"status"` + Storage StorageValue `tfsdk:"storage"` + Version types.String `tfsdk:"version"` +} + +var _ basetypes.ObjectTypable = EncryptionType{} + +type EncryptionType struct { + basetypes.ObjectType +} + +func (t EncryptionType) Equal(o attr.Type) bool { + other, ok := o.(EncryptionType) + + if !ok { + return false + } + + return t.ObjectType.Equal(other.ObjectType) +} + +func (t EncryptionType) String() string { + return "EncryptionType" +} + +func (t EncryptionType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) { + var diags diag.Diagnostics + + attributes := in.Attributes() + + kekKeyIdAttribute, ok := attributes["kek_key_id"] + + if !ok { + diags.AddError( + "Attribute Missing", + `kek_key_id is missing from object`) + + return nil, diags + } + + kekKeyIdVal, ok := kekKeyIdAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`kek_key_id expected to be basetypes.StringValue, was: %T`, kekKeyIdAttribute)) + } + + kekKeyRingIdAttribute, ok := attributes["kek_key_ring_id"] + + if !ok { + diags.AddError( + "Attribute Missing", + `kek_key_ring_id is missing from object`) + + return nil, diags + } + + kekKeyRingIdVal, ok := kekKeyRingIdAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`kek_key_ring_id expected to be basetypes.StringValue, was: %T`, kekKeyRingIdAttribute)) + } + + kekKeyVersionAttribute, ok := attributes["kek_key_version"] + + if !ok { + diags.AddError( + "Attribute Missing", + `kek_key_version is missing from object`) + + return nil, diags + } + + kekKeyVersionVal, ok := kekKeyVersionAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`kek_key_version expected to be basetypes.StringValue, was: %T`, kekKeyVersionAttribute)) + } + + serviceAccountAttribute, ok := attributes["service_account"] + + if !ok { + diags.AddError( + "Attribute Missing", + `service_account is missing from object`) + + return nil, diags + } + + serviceAccountVal, ok := serviceAccountAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`service_account expected to be basetypes.StringValue, was: %T`, serviceAccountAttribute)) + } + + if diags.HasError() { + return nil, diags + } + + return EncryptionValue{ + KekKeyId: kekKeyIdVal, + KekKeyRingId: kekKeyRingIdVal, + KekKeyVersion: kekKeyVersionVal, + ServiceAccount: serviceAccountVal, + state: attr.ValueStateKnown, + }, diags +} + +func NewEncryptionValueNull() EncryptionValue { + return EncryptionValue{ + state: attr.ValueStateNull, + } +} + +func NewEncryptionValueUnknown() EncryptionValue { + return EncryptionValue{ + state: attr.ValueStateUnknown, + } +} + +func NewEncryptionValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (EncryptionValue, diag.Diagnostics) { + var diags diag.Diagnostics + + // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521 + ctx := context.Background() + + for name, attributeType := range attributeTypes { + attribute, ok := attributes[name] + + if !ok { + diags.AddError( + "Missing EncryptionValue Attribute Value", + "While creating a EncryptionValue value, a missing attribute value was detected. "+ + "A EncryptionValue must contain values for all attributes, even if null or unknown. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("EncryptionValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()), + ) + + continue + } + + if !attributeType.Equal(attribute.Type(ctx)) { + diags.AddError( + "Invalid EncryptionValue Attribute Type", + "While creating a EncryptionValue value, an invalid attribute value was detected. "+ + "A EncryptionValue must use a matching attribute type for the value. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("EncryptionValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+ + fmt.Sprintf("EncryptionValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)), + ) + } + } + + for name := range attributes { + _, ok := attributeTypes[name] + + if !ok { + diags.AddError( + "Extra EncryptionValue Attribute Value", + "While creating a EncryptionValue value, an extra attribute value was detected. "+ + "A EncryptionValue must not contain values beyond the expected attribute types. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("Extra EncryptionValue Attribute Name: %s", name), + ) + } + } + + if diags.HasError() { + return NewEncryptionValueUnknown(), diags + } + + kekKeyIdAttribute, ok := attributes["kek_key_id"] + + if !ok { + diags.AddError( + "Attribute Missing", + `kek_key_id is missing from object`) + + return NewEncryptionValueUnknown(), diags + } + + kekKeyIdVal, ok := kekKeyIdAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`kek_key_id expected to be basetypes.StringValue, was: %T`, kekKeyIdAttribute)) + } + + kekKeyRingIdAttribute, ok := attributes["kek_key_ring_id"] + + if !ok { + diags.AddError( + "Attribute Missing", + `kek_key_ring_id is missing from object`) + + return NewEncryptionValueUnknown(), diags + } + + kekKeyRingIdVal, ok := kekKeyRingIdAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`kek_key_ring_id expected to be basetypes.StringValue, was: %T`, kekKeyRingIdAttribute)) + } + + kekKeyVersionAttribute, ok := attributes["kek_key_version"] + + if !ok { + diags.AddError( + "Attribute Missing", + `kek_key_version is missing from object`) + + return NewEncryptionValueUnknown(), diags + } + + kekKeyVersionVal, ok := kekKeyVersionAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`kek_key_version expected to be basetypes.StringValue, was: %T`, kekKeyVersionAttribute)) + } + + serviceAccountAttribute, ok := attributes["service_account"] + + if !ok { + diags.AddError( + "Attribute Missing", + `service_account is missing from object`) + + return NewEncryptionValueUnknown(), diags + } + + serviceAccountVal, ok := serviceAccountAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`service_account expected to be basetypes.StringValue, was: %T`, serviceAccountAttribute)) + } + + if diags.HasError() { + return NewEncryptionValueUnknown(), diags + } + + return EncryptionValue{ + KekKeyId: kekKeyIdVal, + KekKeyRingId: kekKeyRingIdVal, + KekKeyVersion: kekKeyVersionVal, + ServiceAccount: serviceAccountVal, + state: attr.ValueStateKnown, + }, diags +} + +func NewEncryptionValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) EncryptionValue { + object, diags := NewEncryptionValue(attributeTypes, attributes) + + if diags.HasError() { + // This could potentially be added to the diag package. + diagsStrings := make([]string, 0, len(diags)) + + for _, diagnostic := range diags { + diagsStrings = append(diagsStrings, fmt.Sprintf( + "%s | %s | %s", + diagnostic.Severity(), + diagnostic.Summary(), + diagnostic.Detail())) + } + + panic("NewEncryptionValueMust received error(s): " + strings.Join(diagsStrings, "\n")) + } + + return object +} + +func (t EncryptionType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) { + if in.Type() == nil { + return NewEncryptionValueNull(), nil + } + + if !in.Type().Equal(t.TerraformType(ctx)) { + return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type()) + } + + if !in.IsKnown() { + return NewEncryptionValueUnknown(), nil + } + + if in.IsNull() { + return NewEncryptionValueNull(), nil + } + + attributes := map[string]attr.Value{} + + val := map[string]tftypes.Value{} + + err := in.As(&val) + + if err != nil { + return nil, err + } + + for k, v := range val { + a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v) + + if err != nil { + return nil, err + } + + attributes[k] = a + } + + return NewEncryptionValueMust(EncryptionValue{}.AttributeTypes(ctx), attributes), nil +} + +func (t EncryptionType) ValueType(ctx context.Context) attr.Value { + return EncryptionValue{} +} + +var _ basetypes.ObjectValuable = EncryptionValue{} + +type EncryptionValue struct { + KekKeyId basetypes.StringValue `tfsdk:"kek_key_id"` + KekKeyRingId basetypes.StringValue `tfsdk:"kek_key_ring_id"` + KekKeyVersion basetypes.StringValue `tfsdk:"kek_key_version"` + ServiceAccount basetypes.StringValue `tfsdk:"service_account"` + state attr.ValueState +} + +func (v EncryptionValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) { + attrTypes := make(map[string]tftypes.Type, 4) + + var val tftypes.Value + var err error + + attrTypes["kek_key_id"] = basetypes.StringType{}.TerraformType(ctx) + attrTypes["kek_key_ring_id"] = basetypes.StringType{}.TerraformType(ctx) + attrTypes["kek_key_version"] = basetypes.StringType{}.TerraformType(ctx) + attrTypes["service_account"] = basetypes.StringType{}.TerraformType(ctx) + + objectType := tftypes.Object{AttributeTypes: attrTypes} + + switch v.state { + case attr.ValueStateKnown: + vals := make(map[string]tftypes.Value, 4) + + val, err = v.KekKeyId.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["kek_key_id"] = val + + val, err = v.KekKeyRingId.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["kek_key_ring_id"] = val + + val, err = v.KekKeyVersion.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["kek_key_version"] = val + + val, err = v.ServiceAccount.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["service_account"] = val + + if err := tftypes.ValidateValue(objectType, vals); err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + return tftypes.NewValue(objectType, vals), nil + case attr.ValueStateNull: + return tftypes.NewValue(objectType, nil), nil + case attr.ValueStateUnknown: + return tftypes.NewValue(objectType, tftypes.UnknownValue), nil + default: + panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state)) + } +} + +func (v EncryptionValue) IsNull() bool { + return v.state == attr.ValueStateNull +} + +func (v EncryptionValue) IsUnknown() bool { + return v.state == attr.ValueStateUnknown +} + +func (v EncryptionValue) String() string { + return "EncryptionValue" +} + +func (v EncryptionValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) { + var diags diag.Diagnostics + + attributeTypes := map[string]attr.Type{ + "kek_key_id": basetypes.StringType{}, + "kek_key_ring_id": basetypes.StringType{}, + "kek_key_version": basetypes.StringType{}, + "service_account": basetypes.StringType{}, + } + + if v.IsNull() { + return types.ObjectNull(attributeTypes), diags + } + + if v.IsUnknown() { + return types.ObjectUnknown(attributeTypes), diags + } + + objVal, diags := types.ObjectValue( + attributeTypes, + map[string]attr.Value{ + "kek_key_id": v.KekKeyId, + "kek_key_ring_id": v.KekKeyRingId, + "kek_key_version": v.KekKeyVersion, + "service_account": v.ServiceAccount, + }) + + return objVal, diags +} + +func (v EncryptionValue) Equal(o attr.Value) bool { + other, ok := o.(EncryptionValue) + + if !ok { + return false + } + + if v.state != other.state { + return false + } + + if v.state != attr.ValueStateKnown { + return true + } + + if !v.KekKeyId.Equal(other.KekKeyId) { + return false + } + + if !v.KekKeyRingId.Equal(other.KekKeyRingId) { + return false + } + + if !v.KekKeyVersion.Equal(other.KekKeyVersion) { + return false + } + + if !v.ServiceAccount.Equal(other.ServiceAccount) { + return false + } + + return true +} + +func (v EncryptionValue) Type(ctx context.Context) attr.Type { + return EncryptionType{ + basetypes.ObjectType{ + AttrTypes: v.AttributeTypes(ctx), + }, + } +} + +func (v EncryptionValue) AttributeTypes(ctx context.Context) map[string]attr.Type { + return map[string]attr.Type{ + "kek_key_id": basetypes.StringType{}, + "kek_key_ring_id": basetypes.StringType{}, + "kek_key_version": basetypes.StringType{}, + "service_account": basetypes.StringType{}, + } +} + +var _ basetypes.ObjectTypable = NetworkType{} + +type NetworkType struct { + basetypes.ObjectType +} + +func (t NetworkType) Equal(o attr.Type) bool { + other, ok := o.(NetworkType) + + if !ok { + return false + } + + return t.ObjectType.Equal(other.ObjectType) +} + +func (t NetworkType) String() string { + return "NetworkType" +} + +func (t NetworkType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) { + var diags diag.Diagnostics + + attributes := in.Attributes() + + accessScopeAttribute, ok := attributes["access_scope"] + + if !ok { + diags.AddError( + "Attribute Missing", + `access_scope is missing from object`) + + return nil, diags + } + + accessScopeVal, ok := accessScopeAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`access_scope expected to be basetypes.StringValue, was: %T`, accessScopeAttribute)) + } + + aclAttribute, ok := attributes["acl"] + + if !ok { + diags.AddError( + "Attribute Missing", + `acl is missing from object`) + + return nil, diags + } + + aclVal, ok := aclAttribute.(basetypes.ListValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`acl expected to be basetypes.ListValue, was: %T`, aclAttribute)) + } + + instanceAddressAttribute, ok := attributes["instance_address"] + + if !ok { + diags.AddError( + "Attribute Missing", + `instance_address is missing from object`) + + return nil, diags + } + + instanceAddressVal, ok := instanceAddressAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`instance_address expected to be basetypes.StringValue, was: %T`, instanceAddressAttribute)) + } + + routerAddressAttribute, ok := attributes["router_address"] + + if !ok { + diags.AddError( + "Attribute Missing", + `router_address is missing from object`) + + return nil, diags + } + + routerAddressVal, ok := routerAddressAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`router_address expected to be basetypes.StringValue, was: %T`, routerAddressAttribute)) + } + + if diags.HasError() { + return nil, diags + } + + return NetworkValue{ + AccessScope: accessScopeVal, + Acl: aclVal, + InstanceAddress: instanceAddressVal, + RouterAddress: routerAddressVal, + state: attr.ValueStateKnown, + }, diags +} + +func NewNetworkValueNull() NetworkValue { + return NetworkValue{ + state: attr.ValueStateNull, + } +} + +func NewNetworkValueUnknown() NetworkValue { + return NetworkValue{ + state: attr.ValueStateUnknown, + } +} + +func NewNetworkValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (NetworkValue, diag.Diagnostics) { + var diags diag.Diagnostics + + // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521 + ctx := context.Background() + + for name, attributeType := range attributeTypes { + attribute, ok := attributes[name] + + if !ok { + diags.AddError( + "Missing NetworkValue Attribute Value", + "While creating a NetworkValue value, a missing attribute value was detected. "+ + "A NetworkValue must contain values for all attributes, even if null or unknown. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("NetworkValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()), + ) + + continue + } + + if !attributeType.Equal(attribute.Type(ctx)) { + diags.AddError( + "Invalid NetworkValue Attribute Type", + "While creating a NetworkValue value, an invalid attribute value was detected. "+ + "A NetworkValue must use a matching attribute type for the value. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("NetworkValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+ + fmt.Sprintf("NetworkValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)), + ) + } + } + + for name := range attributes { + _, ok := attributeTypes[name] + + if !ok { + diags.AddError( + "Extra NetworkValue Attribute Value", + "While creating a NetworkValue value, an extra attribute value was detected. "+ + "A NetworkValue must not contain values beyond the expected attribute types. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("Extra NetworkValue Attribute Name: %s", name), + ) + } + } + + if diags.HasError() { + return NewNetworkValueUnknown(), diags + } + + accessScopeAttribute, ok := attributes["access_scope"] + + if !ok { + diags.AddError( + "Attribute Missing", + `access_scope is missing from object`) + + return NewNetworkValueUnknown(), diags + } + + accessScopeVal, ok := accessScopeAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`access_scope expected to be basetypes.StringValue, was: %T`, accessScopeAttribute)) + } + + aclAttribute, ok := attributes["acl"] + + if !ok { + diags.AddError( + "Attribute Missing", + `acl is missing from object`) + + return NewNetworkValueUnknown(), diags + } + + aclVal, ok := aclAttribute.(basetypes.ListValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`acl expected to be basetypes.ListValue, was: %T`, aclAttribute)) + } + + instanceAddressAttribute, ok := attributes["instance_address"] + + if !ok { + diags.AddError( + "Attribute Missing", + `instance_address is missing from object`) + + return NewNetworkValueUnknown(), diags + } + + instanceAddressVal, ok := instanceAddressAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`instance_address expected to be basetypes.StringValue, was: %T`, instanceAddressAttribute)) + } + + routerAddressAttribute, ok := attributes["router_address"] + + if !ok { + diags.AddError( + "Attribute Missing", + `router_address is missing from object`) + + return NewNetworkValueUnknown(), diags + } + + routerAddressVal, ok := routerAddressAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`router_address expected to be basetypes.StringValue, was: %T`, routerAddressAttribute)) + } + + if diags.HasError() { + return NewNetworkValueUnknown(), diags + } + + return NetworkValue{ + AccessScope: accessScopeVal, + Acl: aclVal, + InstanceAddress: instanceAddressVal, + RouterAddress: routerAddressVal, + state: attr.ValueStateKnown, + }, diags +} + +func NewNetworkValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) NetworkValue { + object, diags := NewNetworkValue(attributeTypes, attributes) + + if diags.HasError() { + // This could potentially be added to the diag package. + diagsStrings := make([]string, 0, len(diags)) + + for _, diagnostic := range diags { + diagsStrings = append(diagsStrings, fmt.Sprintf( + "%s | %s | %s", + diagnostic.Severity(), + diagnostic.Summary(), + diagnostic.Detail())) + } + + panic("NewNetworkValueMust received error(s): " + strings.Join(diagsStrings, "\n")) + } + + return object +} + +func (t NetworkType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) { + if in.Type() == nil { + return NewNetworkValueNull(), nil + } + + if !in.Type().Equal(t.TerraformType(ctx)) { + return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type()) + } + + if !in.IsKnown() { + return NewNetworkValueUnknown(), nil + } + + if in.IsNull() { + return NewNetworkValueNull(), nil + } + + attributes := map[string]attr.Value{} + + val := map[string]tftypes.Value{} + + err := in.As(&val) + + if err != nil { + return nil, err + } + + for k, v := range val { + a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v) + + if err != nil { + return nil, err + } + + attributes[k] = a + } + + return NewNetworkValueMust(NetworkValue{}.AttributeTypes(ctx), attributes), nil +} + +func (t NetworkType) ValueType(ctx context.Context) attr.Value { + return NetworkValue{} +} + +var _ basetypes.ObjectValuable = NetworkValue{} + +type NetworkValue struct { + AccessScope basetypes.StringValue `tfsdk:"access_scope"` + Acl basetypes.ListValue `tfsdk:"acl"` + InstanceAddress basetypes.StringValue `tfsdk:"instance_address"` + RouterAddress basetypes.StringValue `tfsdk:"router_address"` + state attr.ValueState +} + +func (v NetworkValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) { + attrTypes := make(map[string]tftypes.Type, 4) + + var val tftypes.Value + var err error + + attrTypes["access_scope"] = basetypes.StringType{}.TerraformType(ctx) + attrTypes["acl"] = basetypes.ListType{ + ElemType: types.StringType, + }.TerraformType(ctx) + attrTypes["instance_address"] = basetypes.StringType{}.TerraformType(ctx) + attrTypes["router_address"] = basetypes.StringType{}.TerraformType(ctx) + + objectType := tftypes.Object{AttributeTypes: attrTypes} + + switch v.state { + case attr.ValueStateKnown: + vals := make(map[string]tftypes.Value, 4) + + val, err = v.AccessScope.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["access_scope"] = val + + val, err = v.Acl.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["acl"] = val + + val, err = v.InstanceAddress.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["instance_address"] = val + + val, err = v.RouterAddress.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["router_address"] = val + + if err := tftypes.ValidateValue(objectType, vals); err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + return tftypes.NewValue(objectType, vals), nil + case attr.ValueStateNull: + return tftypes.NewValue(objectType, nil), nil + case attr.ValueStateUnknown: + return tftypes.NewValue(objectType, tftypes.UnknownValue), nil + default: + panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state)) + } +} + +func (v NetworkValue) IsNull() bool { + return v.state == attr.ValueStateNull +} + +func (v NetworkValue) IsUnknown() bool { + return v.state == attr.ValueStateUnknown +} + +func (v NetworkValue) String() string { + return "NetworkValue" +} + +func (v NetworkValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) { + var diags diag.Diagnostics + + var aclVal basetypes.ListValue + switch { + case v.Acl.IsUnknown(): + aclVal = types.ListUnknown(types.StringType) + case v.Acl.IsNull(): + aclVal = types.ListNull(types.StringType) + default: + var d diag.Diagnostics + aclVal, d = types.ListValue(types.StringType, v.Acl.Elements()) + diags.Append(d...) + } + + if diags.HasError() { + return types.ObjectUnknown(map[string]attr.Type{ + "access_scope": basetypes.StringType{}, + "acl": basetypes.ListType{ + ElemType: types.StringType, + }, + "instance_address": basetypes.StringType{}, + "router_address": basetypes.StringType{}, + }), diags + } + + attributeTypes := map[string]attr.Type{ + "access_scope": basetypes.StringType{}, + "acl": basetypes.ListType{ + ElemType: types.StringType, + }, + "instance_address": basetypes.StringType{}, + "router_address": basetypes.StringType{}, + } + + if v.IsNull() { + return types.ObjectNull(attributeTypes), diags + } + + if v.IsUnknown() { + return types.ObjectUnknown(attributeTypes), diags + } + + objVal, diags := types.ObjectValue( + attributeTypes, + map[string]attr.Value{ + "access_scope": v.AccessScope, + "acl": aclVal, + "instance_address": v.InstanceAddress, + "router_address": v.RouterAddress, + }) + + return objVal, diags +} + +func (v NetworkValue) Equal(o attr.Value) bool { + other, ok := o.(NetworkValue) + + if !ok { + return false + } + + if v.state != other.state { + return false + } + + if v.state != attr.ValueStateKnown { + return true + } + + if !v.AccessScope.Equal(other.AccessScope) { + return false + } + + if !v.Acl.Equal(other.Acl) { + return false + } + + if !v.InstanceAddress.Equal(other.InstanceAddress) { + return false + } + + if !v.RouterAddress.Equal(other.RouterAddress) { + return false + } + + return true +} + +func (v NetworkValue) Type(ctx context.Context) attr.Type { + return NetworkType{ + basetypes.ObjectType{ + AttrTypes: v.AttributeTypes(ctx), + }, + } +} + +func (v NetworkValue) AttributeTypes(ctx context.Context) map[string]attr.Type { + return map[string]attr.Type{ + "access_scope": basetypes.StringType{}, + "acl": basetypes.ListType{ + ElemType: types.StringType, + }, + "instance_address": basetypes.StringType{}, + "router_address": basetypes.StringType{}, + } +} + +var _ basetypes.ObjectTypable = StorageType{} + +type StorageType struct { + basetypes.ObjectType +} + +func (t StorageType) Equal(o attr.Type) bool { + other, ok := o.(StorageType) + + if !ok { + return false + } + + return t.ObjectType.Equal(other.ObjectType) +} + +func (t StorageType) String() string { + return "StorageType" +} + +func (t StorageType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) { + var diags diag.Diagnostics + + attributes := in.Attributes() + + classAttribute, ok := attributes["class"] + + if !ok { + diags.AddError( + "Attribute Missing", + `class is missing from object`) + + return nil, diags + } + + classVal, ok := classAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`class expected to be basetypes.StringValue, was: %T`, classAttribute)) + } + + sizeAttribute, ok := attributes["size"] + + if !ok { + diags.AddError( + "Attribute Missing", + `size is missing from object`) + + return nil, diags + } + + sizeVal, ok := sizeAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute)) + } + + if diags.HasError() { + return nil, diags + } + + return StorageValue{ + Class: classVal, + Size: sizeVal, + state: attr.ValueStateKnown, + }, diags +} + +func NewStorageValueNull() StorageValue { + return StorageValue{ + state: attr.ValueStateNull, + } +} + +func NewStorageValueUnknown() StorageValue { + return StorageValue{ + state: attr.ValueStateUnknown, + } +} + +func NewStorageValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (StorageValue, diag.Diagnostics) { + var diags diag.Diagnostics + + // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521 + ctx := context.Background() + + for name, attributeType := range attributeTypes { + attribute, ok := attributes[name] + + if !ok { + diags.AddError( + "Missing StorageValue Attribute Value", + "While creating a StorageValue value, a missing attribute value was detected. "+ + "A StorageValue must contain values for all attributes, even if null or unknown. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("StorageValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()), + ) + + continue + } + + if !attributeType.Equal(attribute.Type(ctx)) { + diags.AddError( + "Invalid StorageValue Attribute Type", + "While creating a StorageValue value, an invalid attribute value was detected. "+ + "A StorageValue must use a matching attribute type for the value. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("StorageValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+ + fmt.Sprintf("StorageValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)), + ) + } + } + + for name := range attributes { + _, ok := attributeTypes[name] + + if !ok { + diags.AddError( + "Extra StorageValue Attribute Value", + "While creating a StorageValue value, an extra attribute value was detected. "+ + "A StorageValue must not contain values beyond the expected attribute types. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("Extra StorageValue Attribute Name: %s", name), + ) + } + } + + if diags.HasError() { + return NewStorageValueUnknown(), diags + } + + classAttribute, ok := attributes["class"] + + if !ok { + diags.AddError( + "Attribute Missing", + `class is missing from object`) + + return NewStorageValueUnknown(), diags + } + + classVal, ok := classAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`class expected to be basetypes.StringValue, was: %T`, classAttribute)) + } + + sizeAttribute, ok := attributes["size"] + + if !ok { + diags.AddError( + "Attribute Missing", + `size is missing from object`) + + return NewStorageValueUnknown(), diags + } + + sizeVal, ok := sizeAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute)) + } + + if diags.HasError() { + return NewStorageValueUnknown(), diags + } + + return StorageValue{ + Class: classVal, + Size: sizeVal, + state: attr.ValueStateKnown, + }, diags +} + +func NewStorageValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) StorageValue { + object, diags := NewStorageValue(attributeTypes, attributes) + + if diags.HasError() { + // This could potentially be added to the diag package. + diagsStrings := make([]string, 0, len(diags)) + + for _, diagnostic := range diags { + diagsStrings = append(diagsStrings, fmt.Sprintf( + "%s | %s | %s", + diagnostic.Severity(), + diagnostic.Summary(), + diagnostic.Detail())) + } + + panic("NewStorageValueMust received error(s): " + strings.Join(diagsStrings, "\n")) + } + + return object +} + +func (t StorageType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) { + if in.Type() == nil { + return NewStorageValueNull(), nil + } + + if !in.Type().Equal(t.TerraformType(ctx)) { + return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type()) + } + + if !in.IsKnown() { + return NewStorageValueUnknown(), nil + } + + if in.IsNull() { + return NewStorageValueNull(), nil + } + + attributes := map[string]attr.Value{} + + val := map[string]tftypes.Value{} + + err := in.As(&val) + + if err != nil { + return nil, err + } + + for k, v := range val { + a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v) + + if err != nil { + return nil, err + } + + attributes[k] = a + } + + return NewStorageValueMust(StorageValue{}.AttributeTypes(ctx), attributes), nil +} + +func (t StorageType) ValueType(ctx context.Context) attr.Value { + return StorageValue{} +} + +var _ basetypes.ObjectValuable = StorageValue{} + +type StorageValue struct { + Class basetypes.StringValue `tfsdk:"class"` + Size basetypes.Int64Value `tfsdk:"size"` + state attr.ValueState +} + +func (v StorageValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) { + attrTypes := make(map[string]tftypes.Type, 2) + + var val tftypes.Value + var err error + + attrTypes["class"] = basetypes.StringType{}.TerraformType(ctx) + attrTypes["size"] = basetypes.Int64Type{}.TerraformType(ctx) + + objectType := tftypes.Object{AttributeTypes: attrTypes} + + switch v.state { + case attr.ValueStateKnown: + vals := make(map[string]tftypes.Value, 2) + + val, err = v.Class.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["class"] = val + + val, err = v.Size.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["size"] = val + + if err := tftypes.ValidateValue(objectType, vals); err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + return tftypes.NewValue(objectType, vals), nil + case attr.ValueStateNull: + return tftypes.NewValue(objectType, nil), nil + case attr.ValueStateUnknown: + return tftypes.NewValue(objectType, tftypes.UnknownValue), nil + default: + panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state)) + } +} + +func (v StorageValue) IsNull() bool { + return v.state == attr.ValueStateNull +} + +func (v StorageValue) IsUnknown() bool { + return v.state == attr.ValueStateUnknown +} + +func (v StorageValue) String() string { + return "StorageValue" +} + +func (v StorageValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) { + var diags diag.Diagnostics + + attributeTypes := map[string]attr.Type{ + "class": basetypes.StringType{}, + "size": basetypes.Int64Type{}, + } + + if v.IsNull() { + return types.ObjectNull(attributeTypes), diags + } + + if v.IsUnknown() { + return types.ObjectUnknown(attributeTypes), diags + } + + objVal, diags := types.ObjectValue( + attributeTypes, + map[string]attr.Value{ + "class": v.Class, + "size": v.Size, + }) + + return objVal, diags +} + +func (v StorageValue) Equal(o attr.Value) bool { + other, ok := o.(StorageValue) + + if !ok { + return false + } + + if v.state != other.state { + return false + } + + if v.state != attr.ValueStateKnown { + return true + } + + if !v.Class.Equal(other.Class) { + return false + } + + if !v.Size.Equal(other.Size) { + return false + } + + return true +} + +func (v StorageValue) Type(ctx context.Context) attr.Type { + return StorageType{ + basetypes.ObjectType{ + AttrTypes: v.AttributeTypes(ctx), + }, + } +} + +func (v StorageValue) AttributeTypes(ctx context.Context) map[string]attr.Type { + return map[string]attr.Type{ + "class": basetypes.StringType{}, + "size": basetypes.Int64Type{}, + } +} diff --git a/stackit/internal/services/sqlserverflexbeta/instance/datasources_gen/instances_data_source_gen.go b/stackit/internal/services/sqlserverflexbeta/instance/datasources_gen/instances_data_source_gen.go new file mode 100644 index 00000000..04fff1f6 --- /dev/null +++ b/stackit/internal/services/sqlserverflexbeta/instance/datasources_gen/instances_data_source_gen.go @@ -0,0 +1,1172 @@ +// Code generated by terraform-plugin-framework-generator DO NOT EDIT. + +package sqlserverflexbeta + +import ( + "context" + "fmt" + "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator" + "github.com/hashicorp/terraform-plugin-framework/attr" + "github.com/hashicorp/terraform-plugin-framework/diag" + "github.com/hashicorp/terraform-plugin-framework/schema/validator" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/hashicorp/terraform-plugin-framework/types/basetypes" + "github.com/hashicorp/terraform-plugin-go/tftypes" + "strings" + + "github.com/hashicorp/terraform-plugin-framework/datasource/schema" +) + +func InstancesDataSourceSchema(ctx context.Context) schema.Schema { + return schema.Schema{ + Attributes: map[string]schema.Attribute{ + "instances": schema.ListNestedAttribute{ + NestedObject: schema.NestedAttributeObject{ + Attributes: map[string]schema.Attribute{ + "id": schema.StringAttribute{ + Computed: true, + Description: "The ID of the instance.", + MarkdownDescription: "The ID of the instance.", + }, + "is_deletable": schema.BoolAttribute{ + Computed: true, + Description: "Whether the instance can be deleted or not.", + MarkdownDescription: "Whether the instance can be deleted or not.", + }, + "name": schema.StringAttribute{ + Computed: true, + Description: "The name of the instance.", + MarkdownDescription: "The name of the instance.", + }, + "status": schema.StringAttribute{ + Computed: true, + }, + }, + CustomType: InstancesType{ + ObjectType: types.ObjectType{ + AttrTypes: InstancesValue{}.AttributeTypes(ctx), + }, + }, + }, + Computed: true, + Description: "List of owned instances and their current status.", + MarkdownDescription: "List of owned instances and their current status.", + }, + "page": schema.Int64Attribute{ + Optional: true, + Computed: true, + Description: "Number of the page of items list to be returned.", + MarkdownDescription: "Number of the page of items list to be returned.", + }, + "pagination": schema.SingleNestedAttribute{ + Attributes: map[string]schema.Attribute{ + "page": schema.Int64Attribute{ + Computed: true, + }, + "size": schema.Int64Attribute{ + Computed: true, + }, + "sort": schema.StringAttribute{ + Computed: true, + }, + "total_pages": schema.Int64Attribute{ + Computed: true, + }, + "total_rows": schema.Int64Attribute{ + Computed: true, + }, + }, + CustomType: PaginationType{ + ObjectType: types.ObjectType{ + AttrTypes: PaginationValue{}.AttributeTypes(ctx), + }, + }, + Computed: true, + }, + "project_id": schema.StringAttribute{ + Required: true, + Description: "The STACKIT project ID.", + MarkdownDescription: "The STACKIT project ID.", + }, + "region": schema.StringAttribute{ + Required: true, + Description: "The region which should be addressed", + MarkdownDescription: "The region which should be addressed", + Validators: []validator.String{ + stringvalidator.OneOf( + "eu01", + ), + }, + }, + "size": schema.Int64Attribute{ + Optional: true, + Computed: true, + Description: "Number of items to be returned on each page.", + MarkdownDescription: "Number of items to be returned on each page.", + }, + "sort": schema.StringAttribute{ + Optional: true, + Computed: true, + Description: "Sorting of the items to be returned on each page.", + MarkdownDescription: "Sorting of the items to be returned on each page.", + Validators: []validator.String{ + stringvalidator.OneOf( + "index.desc", + "index.asc", + "id.desc", + "id.asc", + "is_deletable.desc", + "is_deletable.asc", + "name.asc", + "name.desc", + "status.asc", + "status.desc", + ), + }, + }, + }, + } +} + +type InstancesModel struct { + Instances types.List `tfsdk:"instances"` + Page types.Int64 `tfsdk:"page"` + Pagination PaginationValue `tfsdk:"pagination"` + ProjectId types.String `tfsdk:"project_id"` + Region types.String `tfsdk:"region"` + Size types.Int64 `tfsdk:"size"` + Sort types.String `tfsdk:"sort"` +} + +var _ basetypes.ObjectTypable = InstancesType{} + +type InstancesType struct { + basetypes.ObjectType +} + +func (t InstancesType) Equal(o attr.Type) bool { + other, ok := o.(InstancesType) + + if !ok { + return false + } + + return t.ObjectType.Equal(other.ObjectType) +} + +func (t InstancesType) String() string { + return "InstancesType" +} + +func (t InstancesType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) { + var diags diag.Diagnostics + + attributes := in.Attributes() + + idAttribute, ok := attributes["id"] + + if !ok { + diags.AddError( + "Attribute Missing", + `id is missing from object`) + + return nil, diags + } + + idVal, ok := idAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`id expected to be basetypes.StringValue, was: %T`, idAttribute)) + } + + isDeletableAttribute, ok := attributes["is_deletable"] + + if !ok { + diags.AddError( + "Attribute Missing", + `is_deletable is missing from object`) + + return nil, diags + } + + isDeletableVal, ok := isDeletableAttribute.(basetypes.BoolValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`is_deletable expected to be basetypes.BoolValue, was: %T`, isDeletableAttribute)) + } + + nameAttribute, ok := attributes["name"] + + if !ok { + diags.AddError( + "Attribute Missing", + `name is missing from object`) + + return nil, diags + } + + nameVal, ok := nameAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`name expected to be basetypes.StringValue, was: %T`, nameAttribute)) + } + + statusAttribute, ok := attributes["status"] + + if !ok { + diags.AddError( + "Attribute Missing", + `status is missing from object`) + + return nil, diags + } + + statusVal, ok := statusAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`status expected to be basetypes.StringValue, was: %T`, statusAttribute)) + } + + if diags.HasError() { + return nil, diags + } + + return InstancesValue{ + Id: idVal, + IsDeletable: isDeletableVal, + Name: nameVal, + Status: statusVal, + state: attr.ValueStateKnown, + }, diags +} + +func NewInstancesValueNull() InstancesValue { + return InstancesValue{ + state: attr.ValueStateNull, + } +} + +func NewInstancesValueUnknown() InstancesValue { + return InstancesValue{ + state: attr.ValueStateUnknown, + } +} + +func NewInstancesValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (InstancesValue, diag.Diagnostics) { + var diags diag.Diagnostics + + // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521 + ctx := context.Background() + + for name, attributeType := range attributeTypes { + attribute, ok := attributes[name] + + if !ok { + diags.AddError( + "Missing InstancesValue Attribute Value", + "While creating a InstancesValue value, a missing attribute value was detected. "+ + "A InstancesValue must contain values for all attributes, even if null or unknown. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("InstancesValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()), + ) + + continue + } + + if !attributeType.Equal(attribute.Type(ctx)) { + diags.AddError( + "Invalid InstancesValue Attribute Type", + "While creating a InstancesValue value, an invalid attribute value was detected. "+ + "A InstancesValue must use a matching attribute type for the value. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("InstancesValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+ + fmt.Sprintf("InstancesValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)), + ) + } + } + + for name := range attributes { + _, ok := attributeTypes[name] + + if !ok { + diags.AddError( + "Extra InstancesValue Attribute Value", + "While creating a InstancesValue value, an extra attribute value was detected. "+ + "A InstancesValue must not contain values beyond the expected attribute types. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("Extra InstancesValue Attribute Name: %s", name), + ) + } + } + + if diags.HasError() { + return NewInstancesValueUnknown(), diags + } + + idAttribute, ok := attributes["id"] + + if !ok { + diags.AddError( + "Attribute Missing", + `id is missing from object`) + + return NewInstancesValueUnknown(), diags + } + + idVal, ok := idAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`id expected to be basetypes.StringValue, was: %T`, idAttribute)) + } + + isDeletableAttribute, ok := attributes["is_deletable"] + + if !ok { + diags.AddError( + "Attribute Missing", + `is_deletable is missing from object`) + + return NewInstancesValueUnknown(), diags + } + + isDeletableVal, ok := isDeletableAttribute.(basetypes.BoolValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`is_deletable expected to be basetypes.BoolValue, was: %T`, isDeletableAttribute)) + } + + nameAttribute, ok := attributes["name"] + + if !ok { + diags.AddError( + "Attribute Missing", + `name is missing from object`) + + return NewInstancesValueUnknown(), diags + } + + nameVal, ok := nameAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`name expected to be basetypes.StringValue, was: %T`, nameAttribute)) + } + + statusAttribute, ok := attributes["status"] + + if !ok { + diags.AddError( + "Attribute Missing", + `status is missing from object`) + + return NewInstancesValueUnknown(), diags + } + + statusVal, ok := statusAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`status expected to be basetypes.StringValue, was: %T`, statusAttribute)) + } + + if diags.HasError() { + return NewInstancesValueUnknown(), diags + } + + return InstancesValue{ + Id: idVal, + IsDeletable: isDeletableVal, + Name: nameVal, + Status: statusVal, + state: attr.ValueStateKnown, + }, diags +} + +func NewInstancesValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) InstancesValue { + object, diags := NewInstancesValue(attributeTypes, attributes) + + if diags.HasError() { + // This could potentially be added to the diag package. + diagsStrings := make([]string, 0, len(diags)) + + for _, diagnostic := range diags { + diagsStrings = append(diagsStrings, fmt.Sprintf( + "%s | %s | %s", + diagnostic.Severity(), + diagnostic.Summary(), + diagnostic.Detail())) + } + + panic("NewInstancesValueMust received error(s): " + strings.Join(diagsStrings, "\n")) + } + + return object +} + +func (t InstancesType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) { + if in.Type() == nil { + return NewInstancesValueNull(), nil + } + + if !in.Type().Equal(t.TerraformType(ctx)) { + return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type()) + } + + if !in.IsKnown() { + return NewInstancesValueUnknown(), nil + } + + if in.IsNull() { + return NewInstancesValueNull(), nil + } + + attributes := map[string]attr.Value{} + + val := map[string]tftypes.Value{} + + err := in.As(&val) + + if err != nil { + return nil, err + } + + for k, v := range val { + a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v) + + if err != nil { + return nil, err + } + + attributes[k] = a + } + + return NewInstancesValueMust(InstancesValue{}.AttributeTypes(ctx), attributes), nil +} + +func (t InstancesType) ValueType(ctx context.Context) attr.Value { + return InstancesValue{} +} + +var _ basetypes.ObjectValuable = InstancesValue{} + +type InstancesValue struct { + Id basetypes.StringValue `tfsdk:"id"` + IsDeletable basetypes.BoolValue `tfsdk:"is_deletable"` + Name basetypes.StringValue `tfsdk:"name"` + Status basetypes.StringValue `tfsdk:"status"` + state attr.ValueState +} + +func (v InstancesValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) { + attrTypes := make(map[string]tftypes.Type, 4) + + var val tftypes.Value + var err error + + attrTypes["id"] = basetypes.StringType{}.TerraformType(ctx) + attrTypes["is_deletable"] = basetypes.BoolType{}.TerraformType(ctx) + attrTypes["name"] = basetypes.StringType{}.TerraformType(ctx) + attrTypes["status"] = basetypes.StringType{}.TerraformType(ctx) + + objectType := tftypes.Object{AttributeTypes: attrTypes} + + switch v.state { + case attr.ValueStateKnown: + vals := make(map[string]tftypes.Value, 4) + + val, err = v.Id.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["id"] = val + + val, err = v.IsDeletable.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["is_deletable"] = val + + val, err = v.Name.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["name"] = val + + val, err = v.Status.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["status"] = val + + if err := tftypes.ValidateValue(objectType, vals); err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + return tftypes.NewValue(objectType, vals), nil + case attr.ValueStateNull: + return tftypes.NewValue(objectType, nil), nil + case attr.ValueStateUnknown: + return tftypes.NewValue(objectType, tftypes.UnknownValue), nil + default: + panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state)) + } +} + +func (v InstancesValue) IsNull() bool { + return v.state == attr.ValueStateNull +} + +func (v InstancesValue) IsUnknown() bool { + return v.state == attr.ValueStateUnknown +} + +func (v InstancesValue) String() string { + return "InstancesValue" +} + +func (v InstancesValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) { + var diags diag.Diagnostics + + attributeTypes := map[string]attr.Type{ + "id": basetypes.StringType{}, + "is_deletable": basetypes.BoolType{}, + "name": basetypes.StringType{}, + "status": basetypes.StringType{}, + } + + if v.IsNull() { + return types.ObjectNull(attributeTypes), diags + } + + if v.IsUnknown() { + return types.ObjectUnknown(attributeTypes), diags + } + + objVal, diags := types.ObjectValue( + attributeTypes, + map[string]attr.Value{ + "id": v.Id, + "is_deletable": v.IsDeletable, + "name": v.Name, + "status": v.Status, + }) + + return objVal, diags +} + +func (v InstancesValue) Equal(o attr.Value) bool { + other, ok := o.(InstancesValue) + + if !ok { + return false + } + + if v.state != other.state { + return false + } + + if v.state != attr.ValueStateKnown { + return true + } + + if !v.Id.Equal(other.Id) { + return false + } + + if !v.IsDeletable.Equal(other.IsDeletable) { + return false + } + + if !v.Name.Equal(other.Name) { + return false + } + + if !v.Status.Equal(other.Status) { + return false + } + + return true +} + +func (v InstancesValue) Type(ctx context.Context) attr.Type { + return InstancesType{ + basetypes.ObjectType{ + AttrTypes: v.AttributeTypes(ctx), + }, + } +} + +func (v InstancesValue) AttributeTypes(ctx context.Context) map[string]attr.Type { + return map[string]attr.Type{ + "id": basetypes.StringType{}, + "is_deletable": basetypes.BoolType{}, + "name": basetypes.StringType{}, + "status": basetypes.StringType{}, + } +} + +var _ basetypes.ObjectTypable = PaginationType{} + +type PaginationType struct { + basetypes.ObjectType +} + +func (t PaginationType) Equal(o attr.Type) bool { + other, ok := o.(PaginationType) + + if !ok { + return false + } + + return t.ObjectType.Equal(other.ObjectType) +} + +func (t PaginationType) String() string { + return "PaginationType" +} + +func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) { + var diags diag.Diagnostics + + attributes := in.Attributes() + + pageAttribute, ok := attributes["page"] + + if !ok { + diags.AddError( + "Attribute Missing", + `page is missing from object`) + + return nil, diags + } + + pageVal, ok := pageAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute)) + } + + sizeAttribute, ok := attributes["size"] + + if !ok { + diags.AddError( + "Attribute Missing", + `size is missing from object`) + + return nil, diags + } + + sizeVal, ok := sizeAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute)) + } + + sortAttribute, ok := attributes["sort"] + + if !ok { + diags.AddError( + "Attribute Missing", + `sort is missing from object`) + + return nil, diags + } + + sortVal, ok := sortAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`sort expected to be basetypes.StringValue, was: %T`, sortAttribute)) + } + + totalPagesAttribute, ok := attributes["total_pages"] + + if !ok { + diags.AddError( + "Attribute Missing", + `total_pages is missing from object`) + + return nil, diags + } + + totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute)) + } + + totalRowsAttribute, ok := attributes["total_rows"] + + if !ok { + diags.AddError( + "Attribute Missing", + `total_rows is missing from object`) + + return nil, diags + } + + totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute)) + } + + if diags.HasError() { + return nil, diags + } + + return PaginationValue{ + Page: pageVal, + Size: sizeVal, + Sort: sortVal, + TotalPages: totalPagesVal, + TotalRows: totalRowsVal, + state: attr.ValueStateKnown, + }, diags +} + +func NewPaginationValueNull() PaginationValue { + return PaginationValue{ + state: attr.ValueStateNull, + } +} + +func NewPaginationValueUnknown() PaginationValue { + return PaginationValue{ + state: attr.ValueStateUnknown, + } +} + +func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (PaginationValue, diag.Diagnostics) { + var diags diag.Diagnostics + + // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521 + ctx := context.Background() + + for name, attributeType := range attributeTypes { + attribute, ok := attributes[name] + + if !ok { + diags.AddError( + "Missing PaginationValue Attribute Value", + "While creating a PaginationValue value, a missing attribute value was detected. "+ + "A PaginationValue must contain values for all attributes, even if null or unknown. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("PaginationValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()), + ) + + continue + } + + if !attributeType.Equal(attribute.Type(ctx)) { + diags.AddError( + "Invalid PaginationValue Attribute Type", + "While creating a PaginationValue value, an invalid attribute value was detected. "+ + "A PaginationValue must use a matching attribute type for the value. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("PaginationValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+ + fmt.Sprintf("PaginationValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)), + ) + } + } + + for name := range attributes { + _, ok := attributeTypes[name] + + if !ok { + diags.AddError( + "Extra PaginationValue Attribute Value", + "While creating a PaginationValue value, an extra attribute value was detected. "+ + "A PaginationValue must not contain values beyond the expected attribute types. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("Extra PaginationValue Attribute Name: %s", name), + ) + } + } + + if diags.HasError() { + return NewPaginationValueUnknown(), diags + } + + pageAttribute, ok := attributes["page"] + + if !ok { + diags.AddError( + "Attribute Missing", + `page is missing from object`) + + return NewPaginationValueUnknown(), diags + } + + pageVal, ok := pageAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute)) + } + + sizeAttribute, ok := attributes["size"] + + if !ok { + diags.AddError( + "Attribute Missing", + `size is missing from object`) + + return NewPaginationValueUnknown(), diags + } + + sizeVal, ok := sizeAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute)) + } + + sortAttribute, ok := attributes["sort"] + + if !ok { + diags.AddError( + "Attribute Missing", + `sort is missing from object`) + + return NewPaginationValueUnknown(), diags + } + + sortVal, ok := sortAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`sort expected to be basetypes.StringValue, was: %T`, sortAttribute)) + } + + totalPagesAttribute, ok := attributes["total_pages"] + + if !ok { + diags.AddError( + "Attribute Missing", + `total_pages is missing from object`) + + return NewPaginationValueUnknown(), diags + } + + totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute)) + } + + totalRowsAttribute, ok := attributes["total_rows"] + + if !ok { + diags.AddError( + "Attribute Missing", + `total_rows is missing from object`) + + return NewPaginationValueUnknown(), diags + } + + totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute)) + } + + if diags.HasError() { + return NewPaginationValueUnknown(), diags + } + + return PaginationValue{ + Page: pageVal, + Size: sizeVal, + Sort: sortVal, + TotalPages: totalPagesVal, + TotalRows: totalRowsVal, + state: attr.ValueStateKnown, + }, diags +} + +func NewPaginationValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) PaginationValue { + object, diags := NewPaginationValue(attributeTypes, attributes) + + if diags.HasError() { + // This could potentially be added to the diag package. + diagsStrings := make([]string, 0, len(diags)) + + for _, diagnostic := range diags { + diagsStrings = append(diagsStrings, fmt.Sprintf( + "%s | %s | %s", + diagnostic.Severity(), + diagnostic.Summary(), + diagnostic.Detail())) + } + + panic("NewPaginationValueMust received error(s): " + strings.Join(diagsStrings, "\n")) + } + + return object +} + +func (t PaginationType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) { + if in.Type() == nil { + return NewPaginationValueNull(), nil + } + + if !in.Type().Equal(t.TerraformType(ctx)) { + return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type()) + } + + if !in.IsKnown() { + return NewPaginationValueUnknown(), nil + } + + if in.IsNull() { + return NewPaginationValueNull(), nil + } + + attributes := map[string]attr.Value{} + + val := map[string]tftypes.Value{} + + err := in.As(&val) + + if err != nil { + return nil, err + } + + for k, v := range val { + a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v) + + if err != nil { + return nil, err + } + + attributes[k] = a + } + + return NewPaginationValueMust(PaginationValue{}.AttributeTypes(ctx), attributes), nil +} + +func (t PaginationType) ValueType(ctx context.Context) attr.Value { + return PaginationValue{} +} + +var _ basetypes.ObjectValuable = PaginationValue{} + +type PaginationValue struct { + Page basetypes.Int64Value `tfsdk:"page"` + Size basetypes.Int64Value `tfsdk:"size"` + Sort basetypes.StringValue `tfsdk:"sort"` + TotalPages basetypes.Int64Value `tfsdk:"total_pages"` + TotalRows basetypes.Int64Value `tfsdk:"total_rows"` + state attr.ValueState +} + +func (v PaginationValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) { + attrTypes := make(map[string]tftypes.Type, 5) + + var val tftypes.Value + var err error + + attrTypes["page"] = basetypes.Int64Type{}.TerraformType(ctx) + attrTypes["size"] = basetypes.Int64Type{}.TerraformType(ctx) + attrTypes["sort"] = basetypes.StringType{}.TerraformType(ctx) + attrTypes["total_pages"] = basetypes.Int64Type{}.TerraformType(ctx) + attrTypes["total_rows"] = basetypes.Int64Type{}.TerraformType(ctx) + + objectType := tftypes.Object{AttributeTypes: attrTypes} + + switch v.state { + case attr.ValueStateKnown: + vals := make(map[string]tftypes.Value, 5) + + val, err = v.Page.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["page"] = val + + val, err = v.Size.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["size"] = val + + val, err = v.Sort.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["sort"] = val + + val, err = v.TotalPages.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["total_pages"] = val + + val, err = v.TotalRows.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["total_rows"] = val + + if err := tftypes.ValidateValue(objectType, vals); err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + return tftypes.NewValue(objectType, vals), nil + case attr.ValueStateNull: + return tftypes.NewValue(objectType, nil), nil + case attr.ValueStateUnknown: + return tftypes.NewValue(objectType, tftypes.UnknownValue), nil + default: + panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state)) + } +} + +func (v PaginationValue) IsNull() bool { + return v.state == attr.ValueStateNull +} + +func (v PaginationValue) IsUnknown() bool { + return v.state == attr.ValueStateUnknown +} + +func (v PaginationValue) String() string { + return "PaginationValue" +} + +func (v PaginationValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) { + var diags diag.Diagnostics + + attributeTypes := map[string]attr.Type{ + "page": basetypes.Int64Type{}, + "size": basetypes.Int64Type{}, + "sort": basetypes.StringType{}, + "total_pages": basetypes.Int64Type{}, + "total_rows": basetypes.Int64Type{}, + } + + if v.IsNull() { + return types.ObjectNull(attributeTypes), diags + } + + if v.IsUnknown() { + return types.ObjectUnknown(attributeTypes), diags + } + + objVal, diags := types.ObjectValue( + attributeTypes, + map[string]attr.Value{ + "page": v.Page, + "size": v.Size, + "sort": v.Sort, + "total_pages": v.TotalPages, + "total_rows": v.TotalRows, + }) + + return objVal, diags +} + +func (v PaginationValue) Equal(o attr.Value) bool { + other, ok := o.(PaginationValue) + + if !ok { + return false + } + + if v.state != other.state { + return false + } + + if v.state != attr.ValueStateKnown { + return true + } + + if !v.Page.Equal(other.Page) { + return false + } + + if !v.Size.Equal(other.Size) { + return false + } + + if !v.Sort.Equal(other.Sort) { + return false + } + + if !v.TotalPages.Equal(other.TotalPages) { + return false + } + + if !v.TotalRows.Equal(other.TotalRows) { + return false + } + + return true +} + +func (v PaginationValue) Type(ctx context.Context) attr.Type { + return PaginationType{ + basetypes.ObjectType{ + AttrTypes: v.AttributeTypes(ctx), + }, + } +} + +func (v PaginationValue) AttributeTypes(ctx context.Context) map[string]attr.Type { + return map[string]attr.Type{ + "page": basetypes.Int64Type{}, + "size": basetypes.Int64Type{}, + "sort": basetypes.StringType{}, + "total_pages": basetypes.Int64Type{}, + "total_rows": basetypes.Int64Type{}, + } +} diff --git a/stackit/internal/services/sqlserverflexbeta/instance/functions.go b/stackit/internal/services/sqlserverflexbeta/instance/functions.go new file mode 100644 index 00000000..d66f358e --- /dev/null +++ b/stackit/internal/services/sqlserverflexbeta/instance/functions.go @@ -0,0 +1,306 @@ +package sqlserverflexbeta + +import ( + "context" + "fmt" + "math" + + "github.com/hashicorp/terraform-plugin-framework/attr" + "github.com/hashicorp/terraform-plugin-framework/diag" + "github.com/hashicorp/terraform-plugin-framework/resource" + "github.com/hashicorp/terraform-plugin-framework/types" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" + sqlserverflexbetaDataGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/instance/datasources_gen" + sqlserverflexbetaResGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/instance/resources_gen" +) + +func mapResponseToModel( + ctx context.Context, + resp *sqlserverflexbeta.GetInstanceResponse, + m *sqlserverflexbetaResGen.InstanceModel, + tfDiags diag.Diagnostics, +) error { + m.BackupSchedule = types.StringValue(resp.GetBackupSchedule()) + m.Edition = types.StringValue(string(resp.GetEdition())) + m.Encryption = handleEncryption(m, resp) + m.FlavorId = types.StringValue(resp.GetFlavorId()) + m.Id = types.StringValue(resp.GetId()) + m.InstanceId = types.StringValue(resp.GetId()) + m.IsDeletable = types.BoolValue(resp.GetIsDeletable()) + m.Name = types.StringValue(resp.GetName()) + netAcl, diags := types.ListValueFrom(ctx, types.StringType, resp.Network.GetAcl()) + tfDiags.Append(diags...) + if diags.HasError() { + return fmt.Errorf( + "error converting network acl response value", + ) + } + net, diags := sqlserverflexbetaResGen.NewNetworkValue( + sqlserverflexbetaResGen.NetworkValue{}.AttributeTypes(ctx), + map[string]attr.Value{ + "access_scope": types.StringValue(string(resp.Network.GetAccessScope())), + "acl": netAcl, + "instance_address": types.StringValue(resp.Network.GetInstanceAddress()), + "router_address": types.StringValue(resp.Network.GetRouterAddress()), + }, + ) + tfDiags.Append(diags...) + if diags.HasError() { + return fmt.Errorf( + "error converting network response value", + "access_scope", + types.StringValue(string(resp.Network.GetAccessScope())), + "acl", + netAcl, + "instance_address", + types.StringValue(resp.Network.GetInstanceAddress()), + "router_address", + types.StringValue(resp.Network.GetRouterAddress()), + ) + } + m.Network = net + m.Replicas = types.Int64Value(int64(resp.GetReplicas())) + m.RetentionDays = types.Int64Value(resp.GetRetentionDays()) + m.Status = types.StringValue(string(resp.GetStatus())) + + stor, diags := sqlserverflexbetaResGen.NewStorageValue( + sqlserverflexbetaResGen.StorageValue{}.AttributeTypes(ctx), + map[string]attr.Value{ + "class": types.StringValue(resp.Storage.GetClass()), + "size": types.Int64Value(resp.Storage.GetSize()), + }, + ) + tfDiags.Append(diags...) + if diags.HasError() { + return fmt.Errorf("error converting storage response value") + } + m.Storage = stor + + m.Version = types.StringValue(string(resp.GetVersion())) + return nil +} + +func mapDataResponseToModel( + ctx context.Context, + resp *sqlserverflexbeta.GetInstanceResponse, + m *sqlserverflexbetaDataGen.InstanceModel, + tfDiags diag.Diagnostics, +) error { + m.BackupSchedule = types.StringValue(resp.GetBackupSchedule()) + m.Edition = types.StringValue(string(resp.GetEdition())) + m.Encryption = handleDSEncryption(m, resp) + m.FlavorId = types.StringValue(resp.GetFlavorId()) + m.Id = types.StringValue(resp.GetId()) + m.InstanceId = types.StringValue(resp.GetId()) + m.IsDeletable = types.BoolValue(resp.GetIsDeletable()) + m.Name = types.StringValue(resp.GetName()) + netAcl, diags := types.ListValueFrom(ctx, types.StringType, resp.Network.GetAcl()) + tfDiags.Append(diags...) + if diags.HasError() { + return fmt.Errorf( + "error converting network acl response value", + ) + } + net, diags := sqlserverflexbetaDataGen.NewNetworkValue( + sqlserverflexbetaDataGen.NetworkValue{}.AttributeTypes(ctx), + map[string]attr.Value{ + "access_scope": types.StringValue(string(resp.Network.GetAccessScope())), + "acl": netAcl, + "instance_address": types.StringValue(resp.Network.GetInstanceAddress()), + "router_address": types.StringValue(resp.Network.GetRouterAddress()), + }, + ) + tfDiags.Append(diags...) + if diags.HasError() { + return fmt.Errorf( + "error converting network response value", + "access_scope", + types.StringValue(string(resp.Network.GetAccessScope())), + "acl", + netAcl, + "instance_address", + types.StringValue(resp.Network.GetInstanceAddress()), + "router_address", + types.StringValue(resp.Network.GetRouterAddress()), + ) + } + m.Network = net + m.Replicas = types.Int64Value(int64(resp.GetReplicas())) + m.RetentionDays = types.Int64Value(resp.GetRetentionDays()) + m.Status = types.StringValue(string(resp.GetStatus())) + + stor, diags := sqlserverflexbetaDataGen.NewStorageValue( + sqlserverflexbetaDataGen.StorageValue{}.AttributeTypes(ctx), + map[string]attr.Value{ + "class": types.StringValue(resp.Storage.GetClass()), + "size": types.Int64Value(resp.Storage.GetSize()), + }, + ) + tfDiags.Append(diags...) + if diags.HasError() { + return fmt.Errorf("error converting storage response value") + } + m.Storage = stor + + m.Version = types.StringValue(string(resp.GetVersion())) + return nil +} + +func handleEncryption( + m *sqlserverflexbetaResGen.InstanceModel, + resp *sqlserverflexbeta.GetInstanceResponse, +) sqlserverflexbetaResGen.EncryptionValue { + if !resp.HasEncryption() || + resp.Encryption == nil || + resp.Encryption.KekKeyId == nil || + resp.Encryption.KekKeyRingId == nil || + resp.Encryption.KekKeyVersion == nil || + resp.Encryption.ServiceAccount == nil { + + if m.Encryption.IsNull() || m.Encryption.IsUnknown() { + return sqlserverflexbetaResGen.NewEncryptionValueNull() + } + return m.Encryption + } + + enc := sqlserverflexbetaResGen.NewEncryptionValueNull() + if kVal, ok := resp.Encryption.GetKekKeyIdOk(); ok { + enc.KekKeyId = types.StringValue(kVal) + } + if kkVal, ok := resp.Encryption.GetKekKeyRingIdOk(); ok { + enc.KekKeyRingId = types.StringValue(kkVal) + } + if kkvVal, ok := resp.Encryption.GetKekKeyVersionOk(); ok { + enc.KekKeyVersion = types.StringValue(kkvVal) + } + if sa, ok := resp.Encryption.GetServiceAccountOk(); ok { + enc.ServiceAccount = types.StringValue(sa) + } + return enc +} + +func handleDSEncryption( + m *sqlserverflexbetaDataGen.InstanceModel, + resp *sqlserverflexbeta.GetInstanceResponse, +) sqlserverflexbetaDataGen.EncryptionValue { + if !resp.HasEncryption() || + resp.Encryption == nil || + resp.Encryption.KekKeyId == nil || + resp.Encryption.KekKeyRingId == nil || + resp.Encryption.KekKeyVersion == nil || + resp.Encryption.ServiceAccount == nil { + + if m.Encryption.IsNull() || m.Encryption.IsUnknown() { + return sqlserverflexbetaDataGen.NewEncryptionValueNull() + } + return m.Encryption + } + + enc := sqlserverflexbetaDataGen.NewEncryptionValueNull() + if kVal, ok := resp.Encryption.GetKekKeyIdOk(); ok { + enc.KekKeyId = types.StringValue(kVal) + } + if kkVal, ok := resp.Encryption.GetKekKeyRingIdOk(); ok { + enc.KekKeyRingId = types.StringValue(kkVal) + } + if kkvVal, ok := resp.Encryption.GetKekKeyVersionOk(); ok { + enc.KekKeyVersion = types.StringValue(kkvVal) + } + if sa, ok := resp.Encryption.GetServiceAccountOk(); ok { + enc.ServiceAccount = types.StringValue(sa) + } + return enc +} + +func toCreatePayload( + ctx context.Context, + model *sqlserverflexbetaResGen.InstanceModel, +) (*sqlserverflexbeta.CreateInstanceRequestPayload, error) { + if model == nil { + return nil, fmt.Errorf("nil model") + } + + storagePayload := &sqlserverflexbeta.CreateInstanceRequestPayloadGetStorageArgType{} + if !model.Storage.IsNull() && !model.Storage.IsUnknown() { + storagePayload.Class = model.Storage.Class.ValueStringPointer() + storagePayload.Size = model.Storage.Size.ValueInt64Pointer() + } + + var encryptionPayload *sqlserverflexbeta.CreateInstanceRequestPayloadGetEncryptionArgType = nil + if !model.Encryption.IsNull() && !model.Encryption.IsUnknown() && + !model.Encryption.KekKeyId.IsNull() && model.Encryption.KekKeyId.IsUnknown() && model.Encryption.KekKeyId.ValueString() != "" && + !model.Encryption.KekKeyRingId.IsNull() && !model.Encryption.KekKeyRingId.IsUnknown() && model.Encryption.KekKeyRingId.ValueString() != "" && + !model.Encryption.KekKeyVersion.IsNull() && !model.Encryption.KekKeyVersion.IsUnknown() && model.Encryption.KekKeyVersion.ValueString() != "" && + !model.Encryption.ServiceAccount.IsNull() && !model.Encryption.ServiceAccount.IsUnknown() && model.Encryption.ServiceAccount.ValueString() != "" { + encryptionPayload = &sqlserverflexbeta.CreateInstanceRequestPayloadGetEncryptionArgType{ + KekKeyId: model.Encryption.KekKeyId.ValueStringPointer(), + KekKeyRingId: model.Encryption.KekKeyVersion.ValueStringPointer(), + KekKeyVersion: model.Encryption.KekKeyRingId.ValueStringPointer(), + ServiceAccount: model.Encryption.ServiceAccount.ValueStringPointer(), + } + } + + networkPayload := &sqlserverflexbeta.CreateInstanceRequestPayloadGetNetworkArgType{} + if !model.Network.IsNull() && !model.Network.IsUnknown() { + networkPayload.AccessScope = sqlserverflexbeta.CreateInstanceRequestPayloadNetworkGetAccessScopeAttributeType( + model.Network.AccessScope.ValueStringPointer(), + ) + + var resList []string + diags := model.Network.Acl.ElementsAs(ctx, &resList, false) + if diags.HasError() { + return nil, fmt.Errorf("error converting network acl list") + } + networkPayload.Acl = &resList + } + + return &sqlserverflexbeta.CreateInstanceRequestPayload{ + BackupSchedule: conversion.StringValueToPointer(model.BackupSchedule), + Encryption: encryptionPayload, + FlavorId: conversion.StringValueToPointer(model.FlavorId), + Name: conversion.StringValueToPointer(model.Name), + Network: networkPayload, + RetentionDays: conversion.Int64ValueToPointer(model.RetentionDays), + Storage: storagePayload, + Version: sqlserverflexbeta.CreateInstanceRequestPayloadGetVersionAttributeType( + conversion.StringValueToPointer(model.Version), + ), + }, nil + +} + +func toUpdatePayload( + ctx context.Context, + m *sqlserverflexbetaResGen.InstanceModel, + resp *resource.UpdateResponse, +) (*sqlserverflexbeta.UpdateInstanceRequestPayload, error) { + if m == nil { + return nil, fmt.Errorf("nil model") + } + if m.Replicas.ValueInt64() > math.MaxUint32 { + return nil, fmt.Errorf("replicas value is too big for uint32") + } + replVal := sqlserverflexbeta.Replicas(uint32(m.Replicas.ValueInt64())) + + var netAcl []string + diags := m.Network.Acl.ElementsAs(ctx, &netAcl, false) + resp.Diagnostics.Append(diags...) + if diags.HasError() { + return nil, fmt.Errorf("error converting model network acl value") + } + return &sqlserverflexbeta.UpdateInstanceRequestPayload{ + BackupSchedule: m.BackupSchedule.ValueStringPointer(), + FlavorId: m.FlavorId.ValueStringPointer(), + Name: m.Name.ValueStringPointer(), + Network: &sqlserverflexbeta.UpdateInstanceRequestPayloadNetwork{ + Acl: &netAcl, + }, + Replicas: &replVal, + RetentionDays: m.RetentionDays.ValueInt64Pointer(), + Storage: &sqlserverflexbeta.StorageUpdate{Size: m.Storage.Size.ValueInt64Pointer()}, + Version: sqlserverflexbeta.UpdateInstanceRequestPayloadGetVersionAttributeType( + m.Version.ValueStringPointer(), + ), + }, nil +} diff --git a/stackit/internal/services/sqlserverflexbeta/instance/planModifiers.yaml b/stackit/internal/services/sqlserverflexbeta/instance/planModifiers.yaml new file mode 100644 index 00000000..71d4cbe4 --- /dev/null +++ b/stackit/internal/services/sqlserverflexbeta/instance/planModifiers.yaml @@ -0,0 +1,124 @@ +fields: + - name: 'id' + modifiers: + - 'UseStateForUnknown' + + - name: 'instance_id' + validators: + - validate.NoSeparator + - validate.UUID + modifiers: + - 'UseStateForUnknown' + + - name: 'project_id' + validators: + - validate.NoSeparator + - validate.UUID + modifiers: + - 'UseStateForUnknown' + - 'RequiresReplace' + + - name: 'name' + modifiers: + - 'UseStateForUnknown' + + - name: 'backup_schedule' + modifiers: + - 'UseStateForUnknown' + + - name: 'encryption.kek_key_id' + validators: + - validate.NoSeparator + modifiers: + - 'UseStateForUnknown' + - 'RequiresReplace' + + - name: 'encryption.kek_key_version' + validators: + - validate.NoSeparator + modifiers: + - 'UseStateForUnknown' + - 'RequiresReplace' + + - name: 'encryption.kek_key_ring_id' + validators: + - validate.NoSeparator + modifiers: + - 'UseStateForUnknown' + - 'RequiresReplace' + + - name: 'encryption.service_account' + validators: + - validate.NoSeparator + modifiers: + - 'UseStateForUnknown' + - 'RequiresReplace' + + - name: 'network.access_scope' + validators: + - validate.NoSeparator + modifiers: + - 'UseStateForUnknown' + - 'RequiresReplace' + + - name: 'network.acl' + modifiers: + - 'UseStateForUnknown' + + - name: 'network.instance_address' + modifiers: + - 'UseStateForUnknown' + + - name: 'network.router_address' + modifiers: + - 'UseStateForUnknown' + + - name: 'status' + modifiers: + - 'UseStateForUnknown' + + - name: 'region' + modifiers: + - 'UseStateForUnknown' + - 'RequiresReplace' + + - name: 'retention_days' + modifiers: + - 'UseStateForUnknown' + + - name: 'edition' + modifiers: + - 'UseStateForUnknown' + - 'RequiresReplace' + + - name: 'version' + modifiers: + - 'UseStateForUnknown' + - 'RequiresReplace' + + - name: 'replicas' + modifiers: + - 'UseStateForUnknown' + - 'RequiresReplace' + + - name: 'storage' + modifiers: + - 'UseStateForUnknown' + + - name: 'storage.class' + modifiers: + - 'UseStateForUnknown' + - 'RequiresReplace' + + - name: 'storage.size' + modifiers: + - 'UseStateForUnknown' + + - name: 'flavor_id' + modifiers: + - 'UseStateForUnknown' + - 'RequiresReplace' + + - name: 'is_deletable' + modifiers: + - 'UseStateForUnknown' diff --git a/stackit/internal/services/sqlserverflexbeta/instance/resource.go b/stackit/internal/services/sqlserverflexbeta/instance/resource.go new file mode 100644 index 00000000..a6325169 --- /dev/null +++ b/stackit/internal/services/sqlserverflexbeta/instance/resource.go @@ -0,0 +1,521 @@ +package sqlserverflexbeta + +import ( + "context" + _ "embed" + "fmt" + "net/http" + "strings" + "time" + + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/resource" + "github.com/hashicorp/terraform-plugin-framework/resource/identityschema" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/hashicorp/terraform-plugin-log/tflog" + "github.com/stackitcloud/stackit-sdk-go/core/config" + "github.com/stackitcloud/stackit-sdk-go/core/oapierror" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" + wait "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/wait/sqlserverflexbeta" + + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta" + + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" + + sqlserverflexbetaResGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/instance/resources_gen" +) + +var ( + _ resource.Resource = &instanceResource{} + _ resource.ResourceWithConfigure = &instanceResource{} + _ resource.ResourceWithImportState = &instanceResource{} + _ resource.ResourceWithModifyPlan = &instanceResource{} + _ resource.ResourceWithIdentity = &instanceResource{} +) + +func NewInstanceResource() resource.Resource { + return &instanceResource{} +} + +type instanceResource struct { + client *sqlserverflexbeta.APIClient + providerData core.ProviderData +} + +type InstanceResourceIdentityModel struct { + ProjectID types.String `tfsdk:"project_id"` + Region types.String `tfsdk:"region"` + InstanceID types.String `tfsdk:"instance_id"` +} + +func (r *instanceResource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) { + resp.TypeName = req.ProviderTypeName + "_sqlserverflexbeta_instance" +} + +func (r *instanceResource) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) { + resp.Schema = sqlserverflexbetaResGen.InstanceResourceSchema(ctx) +} + +func (r *instanceResource) IdentitySchema(_ context.Context, _ resource.IdentitySchemaRequest, resp *resource.IdentitySchemaResponse) { + resp.IdentitySchema = identityschema.Schema{ + Attributes: map[string]identityschema.Attribute{ + "project_id": identityschema.StringAttribute{ + RequiredForImport: true, // must be set during import by the practitioner + }, + "region": identityschema.StringAttribute{ + RequiredForImport: true, // can be defaulted by the provider configuration + }, + "instance_id": identityschema.StringAttribute{ + RequiredForImport: true, // can be defaulted by the provider configuration + }, + }, + } +} + +// Configure adds the provider configured client to the resource. +func (r *instanceResource) Configure( + ctx context.Context, + req resource.ConfigureRequest, + resp *resource.ConfigureResponse, +) { + var ok bool + r.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics) + if !ok { + return + } + + apiClientConfigOptions := []config.ConfigurationOption{ + config.WithCustomAuth(r.providerData.RoundTripper), + utils.UserAgentConfigOption(r.providerData.Version), + } + if r.providerData.SQLServerFlexCustomEndpoint != "" { + apiClientConfigOptions = append(apiClientConfigOptions, config.WithEndpoint(r.providerData.SQLServerFlexCustomEndpoint)) + } else { + apiClientConfigOptions = append(apiClientConfigOptions, config.WithRegion(r.providerData.GetRegion())) + } + apiClient, err := sqlserverflexbeta.NewAPIClient(apiClientConfigOptions...) + if err != nil { + resp.Diagnostics.AddError( + "Error configuring API client", + fmt.Sprintf( + "Configuring client: %v. This is an error related to the provider configuration, not to the resource configuration", + err, + ), + ) + return + } + r.client = apiClient + tflog.Info(ctx, "sqlserverflexbeta.Instance client configured") +} + +// ModifyPlan implements resource.ResourceWithModifyPlan. +// Use the modifier to set the effective region in the current plan. +func (r *instanceResource) ModifyPlan( + ctx context.Context, + req resource.ModifyPlanRequest, + resp *resource.ModifyPlanResponse, +) { // nolint:gocritic // function signature required by Terraform + + // skip initial empty configuration to avoid follow-up errors + if req.Config.Raw.IsNull() { + return + } + var configModel sqlserverflexbetaResGen.InstanceModel + resp.Diagnostics.Append(req.Config.Get(ctx, &configModel)...) + if resp.Diagnostics.HasError() { + return + } + + if req.Plan.Raw.IsNull() { + return + } + var planModel sqlserverflexbetaResGen.InstanceModel + resp.Diagnostics.Append(req.Plan.Get(ctx, &planModel)...) + if resp.Diagnostics.HasError() { + return + } + + utils.AdaptRegion(ctx, configModel.Region, &planModel.Region, r.providerData.GetRegion(), resp) + if resp.Diagnostics.HasError() { + return + } + + var identityModel InstanceResourceIdentityModel + identityModel.ProjectID = planModel.ProjectId + identityModel.Region = planModel.Region + if !planModel.InstanceId.IsNull() && !planModel.InstanceId.IsUnknown() { + identityModel.InstanceID = planModel.InstanceId + } + + resp.Diagnostics.Append(resp.Identity.Set(ctx, identityModel)...) + if resp.Diagnostics.HasError() { + return + } + + resp.Diagnostics.Append(resp.Plan.Set(ctx, planModel)...) + if resp.Diagnostics.HasError() { + return + } +} + +//go:embed planModifiers.yaml +var modifiersFileByte []byte + +func (r *instanceResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) { + var data sqlserverflexbetaResGen.InstanceModel + + // Read Terraform plan data into the model + resp.Diagnostics.Append(req.Plan.Get(ctx, &data)...) + + if resp.Diagnostics.HasError() { + return + } + + // Read identity data + var identityData InstanceResourceIdentityModel + resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...) + if resp.Diagnostics.HasError() { + return + } + + ctx = core.InitProviderContext(ctx) + + projectId := identityData.ProjectID.ValueString() + region := identityData.Region.ValueString() + ctx = tflog.SetField(ctx, "project_id", projectId) + ctx = tflog.SetField(ctx, "region", region) + + // Generate API request body from model + payload, err := toCreatePayload(ctx, &data) + if err != nil { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + "Error creating Instance", + fmt.Sprintf("Creating API payload: %v", err), + ) + return + } + // Create new Instance + createResp, err := r.client.CreateInstanceRequest( + ctx, + projectId, + region, + ).CreateInstanceRequestPayload(*payload).Execute() + if err != nil { + core.LogAndAddError(ctx, &resp.Diagnostics, "Error creating Instance", fmt.Sprintf("Calling API: %v", err)) + return + } + + ctx = core.LogResponse(ctx) + + InstanceId := *createResp.Id + + // Example data value setting + data.InstanceId = types.StringValue("id-from-response") + + identity := InstanceResourceIdentityModel{ + ProjectID: types.StringValue(projectId), + Region: types.StringValue(region), + InstanceID: types.StringValue(InstanceId), + } + resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...) + if resp.Diagnostics.HasError() { + return + } + + waitResp, err := wait.CreateInstanceWaitHandler( + ctx, + r.client, + projectId, + InstanceId, + region, + ).SetSleepBeforeWait( + 30 * time.Second, + ).SetTimeout( + 90 * time.Minute, + ).WaitWithContext(ctx) + if err != nil { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + "Error creating Instance", + fmt.Sprintf("Instance creation waiting: %v", err), + ) + return + } + + if waitResp.Id == nil { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + "Error creating Instance", + "Instance creation waiting: returned id is nil", + ) + return + } + + // Map response body to schema + err = mapResponseToModel(ctx, waitResp, &data, resp.Diagnostics) + if err != nil { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + "Error creating Instance", + fmt.Sprintf("Processing API payload: %v", err), + ) + return + } + + // Save data into Terraform state + resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) + + tflog.Info(ctx, "sqlserverflexbeta.Instance created") +} + +func (r *instanceResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) { + var data sqlserverflexbetaResGen.InstanceModel + + // Read Terraform prior state data into the model + resp.Diagnostics.Append(req.State.Get(ctx, &data)...) + if resp.Diagnostics.HasError() { + return + } + + // Read identity data + var identityData InstanceResourceIdentityModel + resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...) + if resp.Diagnostics.HasError() { + return + } + + ctx = core.InitProviderContext(ctx) + + projectId := identityData.ProjectID.ValueString() + region := identityData.Region.ValueString() + ctx = tflog.SetField(ctx, "project_id", projectId) + ctx = tflog.SetField(ctx, "region", region) + + instanceId := data.InstanceId.ValueString() + ctx = tflog.SetField(ctx, "instance_id", instanceId) + + instanceResp, err := r.client.GetInstanceRequest(ctx, projectId, region, instanceId).Execute() + if err != nil { + oapiErr, ok := err.(*oapierror.GenericOpenAPIError) //nolint:errorlint //complaining that error.As should be used to catch wrapped errors, but this error should not be wrapped + if ok && oapiErr.StatusCode == http.StatusNotFound { + resp.State.RemoveResource(ctx) + return + } + core.LogAndAddError(ctx, &resp.Diagnostics, "Error reading instance", err.Error()) + return + } + + ctx = core.LogResponse(ctx) + + // Map response body to schema + err = mapResponseToModel(ctx, instanceResp, &data, resp.Diagnostics) + if err != nil { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + "Error reading instance", + fmt.Sprintf("Processing API payload: %v", err), + ) + return + } + + // Save updated data into Terraform state + resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) + + identity := InstanceResourceIdentityModel{ + ProjectID: types.StringValue(projectId), + Region: types.StringValue(region), + InstanceID: types.StringValue(instanceId), + } + resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...) + if resp.Diagnostics.HasError() { + return + } + + tflog.Info(ctx, "sqlserverflexbeta.Instance read") +} + +func (r *instanceResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) { + var data sqlserverflexbetaResGen.InstanceModel + updateInstanceError := "Error updating instance" + + // Read Terraform prior state data into the model + resp.Diagnostics.Append(req.State.Get(ctx, &data)...) + if resp.Diagnostics.HasError() { + return + } + + // Read identity data + var identityData InstanceResourceIdentityModel + resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...) + if resp.Diagnostics.HasError() { + return + } + + ctx = core.InitProviderContext(ctx) + + projectId := identityData.ProjectID.ValueString() + region := identityData.Region.ValueString() + ctx = tflog.SetField(ctx, "project_id", projectId) + ctx = tflog.SetField(ctx, "region", region) + + instanceId := data.InstanceId.ValueString() + ctx = tflog.SetField(ctx, "instance_id", instanceId) + + // Generate API request body from model + payload, err := toUpdatePayload(ctx, &data, resp) + if err != nil { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + updateInstanceError, + fmt.Sprintf("Creating API payload: %v", err), + ) + return + } + // Update existing instance + err = r.client.UpdateInstanceRequest( + ctx, + projectId, + region, + instanceId, + ).UpdateInstanceRequestPayload(*payload).Execute() + if err != nil { + core.LogAndAddError(ctx, &resp.Diagnostics, updateInstanceError, err.Error()) + return + } + + ctx = core.LogResponse(ctx) + + waitResp, err := wait.UpdateInstanceWaitHandler(ctx, r.client, projectId, instanceId, region).WaitWithContext(ctx) + if err != nil { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + updateInstanceError, + fmt.Sprintf("Instance update waiting: %v", err), + ) + return + } + + // Map response body to schema + err = mapResponseToModel(ctx, waitResp, &data, resp.Diagnostics) + if err != nil { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + updateInstanceError, + fmt.Sprintf("Processing API payload: %v", err), + ) + return + } + + // Save updated data into Terraform state + resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) + + tflog.Info(ctx, "sqlserverflexbeta.Instance updated") +} + +func (r *instanceResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) { + var data sqlserverflexbetaResGen.InstanceModel + + // Read Terraform prior state data into the model + resp.Diagnostics.Append(req.State.Get(ctx, &data)...) + if resp.Diagnostics.HasError() { + return + } + + // Read identity data + var identityData InstanceResourceIdentityModel + resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...) + if resp.Diagnostics.HasError() { + return + } + + ctx = core.InitProviderContext(ctx) + + projectId := identityData.ProjectID.ValueString() + region := identityData.Region.ValueString() + ctx = tflog.SetField(ctx, "project_id", projectId) + ctx = tflog.SetField(ctx, "region", region) + + instanceId := identityData.InstanceID.ValueString() + ctx = tflog.SetField(ctx, "instance_id", instanceId) + + // Delete existing instance + err := r.client.DeleteInstanceRequest(ctx, projectId, region, instanceId).Execute() + if err != nil { + core.LogAndAddError(ctx, &resp.Diagnostics, "Error deleting instance", fmt.Sprintf("Calling API: %v", err)) + return + } + + ctx = core.LogResponse(ctx) + + _, err = wait.DeleteInstanceWaitHandler(ctx, r.client, projectId, instanceId, region).WaitWithContext(ctx) + if err != nil { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + "Error deleting instance", + fmt.Sprintf("Instance deletion waiting: %v", err), + ) + return + } + + tflog.Info(ctx, "sqlserverflexbeta.Instance deleted") +} + +// ImportState imports a resource into the Terraform state on success. +// The expected format of the resource import identifier is: project_id,zone_id,record_set_id +func (r *instanceResource) ImportState( + ctx context.Context, + req resource.ImportStateRequest, + resp *resource.ImportStateResponse, +) { + ctx = core.InitProviderContext(ctx) + + if req.ID != "" { + idParts := strings.Split(req.ID, core.Separator) + + if len(idParts) != 3 || idParts[0] == "" || idParts[1] == "" || idParts[2] == "" { + core.LogAndAddError(ctx, &resp.Diagnostics, + "Error importing instance", + fmt.Sprintf("Expected import identifier with format: [project_id],[region],[instance_id] Got: %q", req.ID), + ) + return + } + + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), idParts[0])...) + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), idParts[1])...) + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), idParts[2])...) + return + } + + var identityData InstanceResourceIdentityModel + resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...) + if resp.Diagnostics.HasError() { + return + } + + resp.Diagnostics.Append( + resp.State.SetAttribute( + ctx, + path.Root("id"), + utils.BuildInternalTerraformId( + identityData.ProjectID.ValueString(), + identityData.Region.ValueString(), + identityData.InstanceID.ValueString(), + ), + )...) + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), identityData.ProjectID.ValueString())...) + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), identityData.Region.ValueString())...) + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), identityData.InstanceID.ValueString())...) + + tflog.Info(ctx, "Sqlserverflexbeta instance state imported") +} diff --git a/stackit/internal/services/sqlserverflexbeta/instance/resources_gen/instance_resource_gen.go b/stackit/internal/services/sqlserverflexbeta/instance/resources_gen/instance_resource_gen.go new file mode 100644 index 00000000..f8865ae5 --- /dev/null +++ b/stackit/internal/services/sqlserverflexbeta/instance/resources_gen/instance_resource_gen.go @@ -0,0 +1,1597 @@ +// Code generated by terraform-plugin-framework-generator DO NOT EDIT. + +package sqlserverflexbeta + +import ( + "context" + "fmt" + "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator" + "github.com/hashicorp/terraform-plugin-framework/attr" + "github.com/hashicorp/terraform-plugin-framework/diag" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringdefault" + "github.com/hashicorp/terraform-plugin-framework/schema/validator" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/hashicorp/terraform-plugin-framework/types/basetypes" + "github.com/hashicorp/terraform-plugin-go/tftypes" + "strings" + + "github.com/hashicorp/terraform-plugin-framework/resource/schema" +) + +func InstanceResourceSchema(ctx context.Context) schema.Schema { + return schema.Schema{ + Attributes: map[string]schema.Attribute{ + "backup_schedule": schema.StringAttribute{ + Required: true, + Description: "The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule.", + MarkdownDescription: "The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule.", + }, + "edition": schema.StringAttribute{ + Computed: true, + Description: "Edition of the MSSQL server instance", + MarkdownDescription: "Edition of the MSSQL server instance", + }, + "encryption": schema.SingleNestedAttribute{ + Attributes: map[string]schema.Attribute{ + "kek_key_id": schema.StringAttribute{ + Required: true, + Description: "The key identifier", + MarkdownDescription: "The key identifier", + }, + "kek_key_ring_id": schema.StringAttribute{ + Required: true, + Description: "The keyring identifier", + MarkdownDescription: "The keyring identifier", + }, + "kek_key_version": schema.StringAttribute{ + Required: true, + Description: "The key version", + MarkdownDescription: "The key version", + }, + "service_account": schema.StringAttribute{ + Required: true, + }, + }, + CustomType: EncryptionType{ + ObjectType: types.ObjectType{ + AttrTypes: EncryptionValue{}.AttributeTypes(ctx), + }, + }, + Optional: true, + Computed: true, + Description: "this defines which key to use for storage encryption", + MarkdownDescription: "this defines which key to use for storage encryption", + }, + "flavor_id": schema.StringAttribute{ + Required: true, + Description: "The id of the instance flavor.", + MarkdownDescription: "The id of the instance flavor.", + }, + "id": schema.StringAttribute{ + Computed: true, + Description: "The ID of the instance.", + MarkdownDescription: "The ID of the instance.", + }, + "instance_id": schema.StringAttribute{ + Optional: true, + Computed: true, + Description: "The ID of the instance.", + MarkdownDescription: "The ID of the instance.", + }, + "is_deletable": schema.BoolAttribute{ + Computed: true, + Description: "Whether the instance can be deleted or not.", + MarkdownDescription: "Whether the instance can be deleted or not.", + }, + "name": schema.StringAttribute{ + Required: true, + Description: "The name of the instance.", + MarkdownDescription: "The name of the instance.", + }, + "network": schema.SingleNestedAttribute{ + Attributes: map[string]schema.Attribute{ + "access_scope": schema.StringAttribute{ + Optional: true, + Computed: true, + Description: "The network access scope of the instance\n\n⚠️ **Note:** This feature is in private preview. Supplying this object is only permitted for enabled accounts. If your account does not have access, the request will be rejected.\n", + MarkdownDescription: "The network access scope of the instance\n\n⚠️ **Note:** This feature is in private preview. Supplying this object is only permitted for enabled accounts. If your account does not have access, the request will be rejected.\n", + Validators: []validator.String{ + stringvalidator.OneOf( + "PUBLIC", + "SNA", + ), + }, + Default: stringdefault.StaticString("PUBLIC"), + }, + "acl": schema.ListAttribute{ + ElementType: types.StringType, + Required: true, + Description: "List of IPV4 cidr.", + MarkdownDescription: "List of IPV4 cidr.", + }, + "instance_address": schema.StringAttribute{ + Computed: true, + }, + "router_address": schema.StringAttribute{ + Computed: true, + }, + }, + CustomType: NetworkType{ + ObjectType: types.ObjectType{ + AttrTypes: NetworkValue{}.AttributeTypes(ctx), + }, + }, + Required: true, + Description: "the network configuration of the instance.", + MarkdownDescription: "the network configuration of the instance.", + }, + "project_id": schema.StringAttribute{ + Optional: true, + Computed: true, + Description: "The STACKIT project ID.", + MarkdownDescription: "The STACKIT project ID.", + }, + "region": schema.StringAttribute{ + Optional: true, + Computed: true, + Description: "The region which should be addressed", + MarkdownDescription: "The region which should be addressed", + Validators: []validator.String{ + stringvalidator.OneOf( + "eu01", + ), + }, + }, + "replicas": schema.Int64Attribute{ + Computed: true, + Description: "How many replicas the instance should have.", + MarkdownDescription: "How many replicas the instance should have.", + }, + "retention_days": schema.Int64Attribute{ + Required: true, + Description: "The days for how long the backup files should be stored before cleaned up. 30 to 365", + MarkdownDescription: "The days for how long the backup files should be stored before cleaned up. 30 to 365", + }, + "status": schema.StringAttribute{ + Computed: true, + }, + "storage": schema.SingleNestedAttribute{ + Attributes: map[string]schema.Attribute{ + "class": schema.StringAttribute{ + Required: true, + Description: "The storage class for the storage.", + MarkdownDescription: "The storage class for the storage.", + }, + "size": schema.Int64Attribute{ + Required: true, + Description: "The storage size in Gigabytes.", + MarkdownDescription: "The storage size in Gigabytes.", + }, + }, + CustomType: StorageType{ + ObjectType: types.ObjectType{ + AttrTypes: StorageValue{}.AttributeTypes(ctx), + }, + }, + Required: true, + Description: "The object containing information about the storage size and class.", + MarkdownDescription: "The object containing information about the storage size and class.", + }, + "version": schema.StringAttribute{ + Required: true, + Description: "The sqlserver version used for the instance.", + MarkdownDescription: "The sqlserver version used for the instance.", + Validators: []validator.String{ + stringvalidator.OneOf( + "2022", + ), + }, + }, + }, + } +} + +type InstanceModel struct { + BackupSchedule types.String `tfsdk:"backup_schedule"` + Edition types.String `tfsdk:"edition"` + Encryption EncryptionValue `tfsdk:"encryption"` + FlavorId types.String `tfsdk:"flavor_id"` + Id types.String `tfsdk:"id"` + InstanceId types.String `tfsdk:"instance_id"` + IsDeletable types.Bool `tfsdk:"is_deletable"` + Name types.String `tfsdk:"name"` + Network NetworkValue `tfsdk:"network"` + ProjectId types.String `tfsdk:"project_id"` + Region types.String `tfsdk:"region"` + Replicas types.Int64 `tfsdk:"replicas"` + RetentionDays types.Int64 `tfsdk:"retention_days"` + Status types.String `tfsdk:"status"` + Storage StorageValue `tfsdk:"storage"` + Version types.String `tfsdk:"version"` +} + +var _ basetypes.ObjectTypable = EncryptionType{} + +type EncryptionType struct { + basetypes.ObjectType +} + +func (t EncryptionType) Equal(o attr.Type) bool { + other, ok := o.(EncryptionType) + + if !ok { + return false + } + + return t.ObjectType.Equal(other.ObjectType) +} + +func (t EncryptionType) String() string { + return "EncryptionType" +} + +func (t EncryptionType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) { + var diags diag.Diagnostics + + attributes := in.Attributes() + + kekKeyIdAttribute, ok := attributes["kek_key_id"] + + if !ok { + diags.AddError( + "Attribute Missing", + `kek_key_id is missing from object`) + + return nil, diags + } + + kekKeyIdVal, ok := kekKeyIdAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`kek_key_id expected to be basetypes.StringValue, was: %T`, kekKeyIdAttribute)) + } + + kekKeyRingIdAttribute, ok := attributes["kek_key_ring_id"] + + if !ok { + diags.AddError( + "Attribute Missing", + `kek_key_ring_id is missing from object`) + + return nil, diags + } + + kekKeyRingIdVal, ok := kekKeyRingIdAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`kek_key_ring_id expected to be basetypes.StringValue, was: %T`, kekKeyRingIdAttribute)) + } + + kekKeyVersionAttribute, ok := attributes["kek_key_version"] + + if !ok { + diags.AddError( + "Attribute Missing", + `kek_key_version is missing from object`) + + return nil, diags + } + + kekKeyVersionVal, ok := kekKeyVersionAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`kek_key_version expected to be basetypes.StringValue, was: %T`, kekKeyVersionAttribute)) + } + + serviceAccountAttribute, ok := attributes["service_account"] + + if !ok { + diags.AddError( + "Attribute Missing", + `service_account is missing from object`) + + return nil, diags + } + + serviceAccountVal, ok := serviceAccountAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`service_account expected to be basetypes.StringValue, was: %T`, serviceAccountAttribute)) + } + + if diags.HasError() { + return nil, diags + } + + return EncryptionValue{ + KekKeyId: kekKeyIdVal, + KekKeyRingId: kekKeyRingIdVal, + KekKeyVersion: kekKeyVersionVal, + ServiceAccount: serviceAccountVal, + state: attr.ValueStateKnown, + }, diags +} + +func NewEncryptionValueNull() EncryptionValue { + return EncryptionValue{ + state: attr.ValueStateNull, + } +} + +func NewEncryptionValueUnknown() EncryptionValue { + return EncryptionValue{ + state: attr.ValueStateUnknown, + } +} + +func NewEncryptionValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (EncryptionValue, diag.Diagnostics) { + var diags diag.Diagnostics + + // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521 + ctx := context.Background() + + for name, attributeType := range attributeTypes { + attribute, ok := attributes[name] + + if !ok { + diags.AddError( + "Missing EncryptionValue Attribute Value", + "While creating a EncryptionValue value, a missing attribute value was detected. "+ + "A EncryptionValue must contain values for all attributes, even if null or unknown. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("EncryptionValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()), + ) + + continue + } + + if !attributeType.Equal(attribute.Type(ctx)) { + diags.AddError( + "Invalid EncryptionValue Attribute Type", + "While creating a EncryptionValue value, an invalid attribute value was detected. "+ + "A EncryptionValue must use a matching attribute type for the value. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("EncryptionValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+ + fmt.Sprintf("EncryptionValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)), + ) + } + } + + for name := range attributes { + _, ok := attributeTypes[name] + + if !ok { + diags.AddError( + "Extra EncryptionValue Attribute Value", + "While creating a EncryptionValue value, an extra attribute value was detected. "+ + "A EncryptionValue must not contain values beyond the expected attribute types. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("Extra EncryptionValue Attribute Name: %s", name), + ) + } + } + + if diags.HasError() { + return NewEncryptionValueUnknown(), diags + } + + kekKeyIdAttribute, ok := attributes["kek_key_id"] + + if !ok { + diags.AddError( + "Attribute Missing", + `kek_key_id is missing from object`) + + return NewEncryptionValueUnknown(), diags + } + + kekKeyIdVal, ok := kekKeyIdAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`kek_key_id expected to be basetypes.StringValue, was: %T`, kekKeyIdAttribute)) + } + + kekKeyRingIdAttribute, ok := attributes["kek_key_ring_id"] + + if !ok { + diags.AddError( + "Attribute Missing", + `kek_key_ring_id is missing from object`) + + return NewEncryptionValueUnknown(), diags + } + + kekKeyRingIdVal, ok := kekKeyRingIdAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`kek_key_ring_id expected to be basetypes.StringValue, was: %T`, kekKeyRingIdAttribute)) + } + + kekKeyVersionAttribute, ok := attributes["kek_key_version"] + + if !ok { + diags.AddError( + "Attribute Missing", + `kek_key_version is missing from object`) + + return NewEncryptionValueUnknown(), diags + } + + kekKeyVersionVal, ok := kekKeyVersionAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`kek_key_version expected to be basetypes.StringValue, was: %T`, kekKeyVersionAttribute)) + } + + serviceAccountAttribute, ok := attributes["service_account"] + + if !ok { + diags.AddError( + "Attribute Missing", + `service_account is missing from object`) + + return NewEncryptionValueUnknown(), diags + } + + serviceAccountVal, ok := serviceAccountAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`service_account expected to be basetypes.StringValue, was: %T`, serviceAccountAttribute)) + } + + if diags.HasError() { + return NewEncryptionValueUnknown(), diags + } + + return EncryptionValue{ + KekKeyId: kekKeyIdVal, + KekKeyRingId: kekKeyRingIdVal, + KekKeyVersion: kekKeyVersionVal, + ServiceAccount: serviceAccountVal, + state: attr.ValueStateKnown, + }, diags +} + +func NewEncryptionValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) EncryptionValue { + object, diags := NewEncryptionValue(attributeTypes, attributes) + + if diags.HasError() { + // This could potentially be added to the diag package. + diagsStrings := make([]string, 0, len(diags)) + + for _, diagnostic := range diags { + diagsStrings = append(diagsStrings, fmt.Sprintf( + "%s | %s | %s", + diagnostic.Severity(), + diagnostic.Summary(), + diagnostic.Detail())) + } + + panic("NewEncryptionValueMust received error(s): " + strings.Join(diagsStrings, "\n")) + } + + return object +} + +func (t EncryptionType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) { + if in.Type() == nil { + return NewEncryptionValueNull(), nil + } + + if !in.Type().Equal(t.TerraformType(ctx)) { + return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type()) + } + + if !in.IsKnown() { + return NewEncryptionValueUnknown(), nil + } + + if in.IsNull() { + return NewEncryptionValueNull(), nil + } + + attributes := map[string]attr.Value{} + + val := map[string]tftypes.Value{} + + err := in.As(&val) + + if err != nil { + return nil, err + } + + for k, v := range val { + a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v) + + if err != nil { + return nil, err + } + + attributes[k] = a + } + + return NewEncryptionValueMust(EncryptionValue{}.AttributeTypes(ctx), attributes), nil +} + +func (t EncryptionType) ValueType(ctx context.Context) attr.Value { + return EncryptionValue{} +} + +var _ basetypes.ObjectValuable = EncryptionValue{} + +type EncryptionValue struct { + KekKeyId basetypes.StringValue `tfsdk:"kek_key_id"` + KekKeyRingId basetypes.StringValue `tfsdk:"kek_key_ring_id"` + KekKeyVersion basetypes.StringValue `tfsdk:"kek_key_version"` + ServiceAccount basetypes.StringValue `tfsdk:"service_account"` + state attr.ValueState +} + +func (v EncryptionValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) { + attrTypes := make(map[string]tftypes.Type, 4) + + var val tftypes.Value + var err error + + attrTypes["kek_key_id"] = basetypes.StringType{}.TerraformType(ctx) + attrTypes["kek_key_ring_id"] = basetypes.StringType{}.TerraformType(ctx) + attrTypes["kek_key_version"] = basetypes.StringType{}.TerraformType(ctx) + attrTypes["service_account"] = basetypes.StringType{}.TerraformType(ctx) + + objectType := tftypes.Object{AttributeTypes: attrTypes} + + switch v.state { + case attr.ValueStateKnown: + vals := make(map[string]tftypes.Value, 4) + + val, err = v.KekKeyId.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["kek_key_id"] = val + + val, err = v.KekKeyRingId.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["kek_key_ring_id"] = val + + val, err = v.KekKeyVersion.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["kek_key_version"] = val + + val, err = v.ServiceAccount.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["service_account"] = val + + if err := tftypes.ValidateValue(objectType, vals); err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + return tftypes.NewValue(objectType, vals), nil + case attr.ValueStateNull: + return tftypes.NewValue(objectType, nil), nil + case attr.ValueStateUnknown: + return tftypes.NewValue(objectType, tftypes.UnknownValue), nil + default: + panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state)) + } +} + +func (v EncryptionValue) IsNull() bool { + return v.state == attr.ValueStateNull +} + +func (v EncryptionValue) IsUnknown() bool { + return v.state == attr.ValueStateUnknown +} + +func (v EncryptionValue) String() string { + return "EncryptionValue" +} + +func (v EncryptionValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) { + var diags diag.Diagnostics + + attributeTypes := map[string]attr.Type{ + "kek_key_id": basetypes.StringType{}, + "kek_key_ring_id": basetypes.StringType{}, + "kek_key_version": basetypes.StringType{}, + "service_account": basetypes.StringType{}, + } + + if v.IsNull() { + return types.ObjectNull(attributeTypes), diags + } + + if v.IsUnknown() { + return types.ObjectUnknown(attributeTypes), diags + } + + objVal, diags := types.ObjectValue( + attributeTypes, + map[string]attr.Value{ + "kek_key_id": v.KekKeyId, + "kek_key_ring_id": v.KekKeyRingId, + "kek_key_version": v.KekKeyVersion, + "service_account": v.ServiceAccount, + }) + + return objVal, diags +} + +func (v EncryptionValue) Equal(o attr.Value) bool { + other, ok := o.(EncryptionValue) + + if !ok { + return false + } + + if v.state != other.state { + return false + } + + if v.state != attr.ValueStateKnown { + return true + } + + if !v.KekKeyId.Equal(other.KekKeyId) { + return false + } + + if !v.KekKeyRingId.Equal(other.KekKeyRingId) { + return false + } + + if !v.KekKeyVersion.Equal(other.KekKeyVersion) { + return false + } + + if !v.ServiceAccount.Equal(other.ServiceAccount) { + return false + } + + return true +} + +func (v EncryptionValue) Type(ctx context.Context) attr.Type { + return EncryptionType{ + basetypes.ObjectType{ + AttrTypes: v.AttributeTypes(ctx), + }, + } +} + +func (v EncryptionValue) AttributeTypes(ctx context.Context) map[string]attr.Type { + return map[string]attr.Type{ + "kek_key_id": basetypes.StringType{}, + "kek_key_ring_id": basetypes.StringType{}, + "kek_key_version": basetypes.StringType{}, + "service_account": basetypes.StringType{}, + } +} + +var _ basetypes.ObjectTypable = NetworkType{} + +type NetworkType struct { + basetypes.ObjectType +} + +func (t NetworkType) Equal(o attr.Type) bool { + other, ok := o.(NetworkType) + + if !ok { + return false + } + + return t.ObjectType.Equal(other.ObjectType) +} + +func (t NetworkType) String() string { + return "NetworkType" +} + +func (t NetworkType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) { + var diags diag.Diagnostics + + attributes := in.Attributes() + + accessScopeAttribute, ok := attributes["access_scope"] + + if !ok { + diags.AddError( + "Attribute Missing", + `access_scope is missing from object`) + + return nil, diags + } + + accessScopeVal, ok := accessScopeAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`access_scope expected to be basetypes.StringValue, was: %T`, accessScopeAttribute)) + } + + aclAttribute, ok := attributes["acl"] + + if !ok { + diags.AddError( + "Attribute Missing", + `acl is missing from object`) + + return nil, diags + } + + aclVal, ok := aclAttribute.(basetypes.ListValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`acl expected to be basetypes.ListValue, was: %T`, aclAttribute)) + } + + instanceAddressAttribute, ok := attributes["instance_address"] + + if !ok { + diags.AddError( + "Attribute Missing", + `instance_address is missing from object`) + + return nil, diags + } + + instanceAddressVal, ok := instanceAddressAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`instance_address expected to be basetypes.StringValue, was: %T`, instanceAddressAttribute)) + } + + routerAddressAttribute, ok := attributes["router_address"] + + if !ok { + diags.AddError( + "Attribute Missing", + `router_address is missing from object`) + + return nil, diags + } + + routerAddressVal, ok := routerAddressAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`router_address expected to be basetypes.StringValue, was: %T`, routerAddressAttribute)) + } + + if diags.HasError() { + return nil, diags + } + + return NetworkValue{ + AccessScope: accessScopeVal, + Acl: aclVal, + InstanceAddress: instanceAddressVal, + RouterAddress: routerAddressVal, + state: attr.ValueStateKnown, + }, diags +} + +func NewNetworkValueNull() NetworkValue { + return NetworkValue{ + state: attr.ValueStateNull, + } +} + +func NewNetworkValueUnknown() NetworkValue { + return NetworkValue{ + state: attr.ValueStateUnknown, + } +} + +func NewNetworkValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (NetworkValue, diag.Diagnostics) { + var diags diag.Diagnostics + + // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521 + ctx := context.Background() + + for name, attributeType := range attributeTypes { + attribute, ok := attributes[name] + + if !ok { + diags.AddError( + "Missing NetworkValue Attribute Value", + "While creating a NetworkValue value, a missing attribute value was detected. "+ + "A NetworkValue must contain values for all attributes, even if null or unknown. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("NetworkValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()), + ) + + continue + } + + if !attributeType.Equal(attribute.Type(ctx)) { + diags.AddError( + "Invalid NetworkValue Attribute Type", + "While creating a NetworkValue value, an invalid attribute value was detected. "+ + "A NetworkValue must use a matching attribute type for the value. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("NetworkValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+ + fmt.Sprintf("NetworkValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)), + ) + } + } + + for name := range attributes { + _, ok := attributeTypes[name] + + if !ok { + diags.AddError( + "Extra NetworkValue Attribute Value", + "While creating a NetworkValue value, an extra attribute value was detected. "+ + "A NetworkValue must not contain values beyond the expected attribute types. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("Extra NetworkValue Attribute Name: %s", name), + ) + } + } + + if diags.HasError() { + return NewNetworkValueUnknown(), diags + } + + accessScopeAttribute, ok := attributes["access_scope"] + + if !ok { + diags.AddError( + "Attribute Missing", + `access_scope is missing from object`) + + return NewNetworkValueUnknown(), diags + } + + accessScopeVal, ok := accessScopeAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`access_scope expected to be basetypes.StringValue, was: %T`, accessScopeAttribute)) + } + + aclAttribute, ok := attributes["acl"] + + if !ok { + diags.AddError( + "Attribute Missing", + `acl is missing from object`) + + return NewNetworkValueUnknown(), diags + } + + aclVal, ok := aclAttribute.(basetypes.ListValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`acl expected to be basetypes.ListValue, was: %T`, aclAttribute)) + } + + instanceAddressAttribute, ok := attributes["instance_address"] + + if !ok { + diags.AddError( + "Attribute Missing", + `instance_address is missing from object`) + + return NewNetworkValueUnknown(), diags + } + + instanceAddressVal, ok := instanceAddressAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`instance_address expected to be basetypes.StringValue, was: %T`, instanceAddressAttribute)) + } + + routerAddressAttribute, ok := attributes["router_address"] + + if !ok { + diags.AddError( + "Attribute Missing", + `router_address is missing from object`) + + return NewNetworkValueUnknown(), diags + } + + routerAddressVal, ok := routerAddressAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`router_address expected to be basetypes.StringValue, was: %T`, routerAddressAttribute)) + } + + if diags.HasError() { + return NewNetworkValueUnknown(), diags + } + + return NetworkValue{ + AccessScope: accessScopeVal, + Acl: aclVal, + InstanceAddress: instanceAddressVal, + RouterAddress: routerAddressVal, + state: attr.ValueStateKnown, + }, diags +} + +func NewNetworkValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) NetworkValue { + object, diags := NewNetworkValue(attributeTypes, attributes) + + if diags.HasError() { + // This could potentially be added to the diag package. + diagsStrings := make([]string, 0, len(diags)) + + for _, diagnostic := range diags { + diagsStrings = append(diagsStrings, fmt.Sprintf( + "%s | %s | %s", + diagnostic.Severity(), + diagnostic.Summary(), + diagnostic.Detail())) + } + + panic("NewNetworkValueMust received error(s): " + strings.Join(diagsStrings, "\n")) + } + + return object +} + +func (t NetworkType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) { + if in.Type() == nil { + return NewNetworkValueNull(), nil + } + + if !in.Type().Equal(t.TerraformType(ctx)) { + return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type()) + } + + if !in.IsKnown() { + return NewNetworkValueUnknown(), nil + } + + if in.IsNull() { + return NewNetworkValueNull(), nil + } + + attributes := map[string]attr.Value{} + + val := map[string]tftypes.Value{} + + err := in.As(&val) + + if err != nil { + return nil, err + } + + for k, v := range val { + a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v) + + if err != nil { + return nil, err + } + + attributes[k] = a + } + + return NewNetworkValueMust(NetworkValue{}.AttributeTypes(ctx), attributes), nil +} + +func (t NetworkType) ValueType(ctx context.Context) attr.Value { + return NetworkValue{} +} + +var _ basetypes.ObjectValuable = NetworkValue{} + +type NetworkValue struct { + AccessScope basetypes.StringValue `tfsdk:"access_scope"` + Acl basetypes.ListValue `tfsdk:"acl"` + InstanceAddress basetypes.StringValue `tfsdk:"instance_address"` + RouterAddress basetypes.StringValue `tfsdk:"router_address"` + state attr.ValueState +} + +func (v NetworkValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) { + attrTypes := make(map[string]tftypes.Type, 4) + + var val tftypes.Value + var err error + + attrTypes["access_scope"] = basetypes.StringType{}.TerraformType(ctx) + attrTypes["acl"] = basetypes.ListType{ + ElemType: types.StringType, + }.TerraformType(ctx) + attrTypes["instance_address"] = basetypes.StringType{}.TerraformType(ctx) + attrTypes["router_address"] = basetypes.StringType{}.TerraformType(ctx) + + objectType := tftypes.Object{AttributeTypes: attrTypes} + + switch v.state { + case attr.ValueStateKnown: + vals := make(map[string]tftypes.Value, 4) + + val, err = v.AccessScope.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["access_scope"] = val + + val, err = v.Acl.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["acl"] = val + + val, err = v.InstanceAddress.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["instance_address"] = val + + val, err = v.RouterAddress.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["router_address"] = val + + if err := tftypes.ValidateValue(objectType, vals); err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + return tftypes.NewValue(objectType, vals), nil + case attr.ValueStateNull: + return tftypes.NewValue(objectType, nil), nil + case attr.ValueStateUnknown: + return tftypes.NewValue(objectType, tftypes.UnknownValue), nil + default: + panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state)) + } +} + +func (v NetworkValue) IsNull() bool { + return v.state == attr.ValueStateNull +} + +func (v NetworkValue) IsUnknown() bool { + return v.state == attr.ValueStateUnknown +} + +func (v NetworkValue) String() string { + return "NetworkValue" +} + +func (v NetworkValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) { + var diags diag.Diagnostics + + var aclVal basetypes.ListValue + switch { + case v.Acl.IsUnknown(): + aclVal = types.ListUnknown(types.StringType) + case v.Acl.IsNull(): + aclVal = types.ListNull(types.StringType) + default: + var d diag.Diagnostics + aclVal, d = types.ListValue(types.StringType, v.Acl.Elements()) + diags.Append(d...) + } + + if diags.HasError() { + return types.ObjectUnknown(map[string]attr.Type{ + "access_scope": basetypes.StringType{}, + "acl": basetypes.ListType{ + ElemType: types.StringType, + }, + "instance_address": basetypes.StringType{}, + "router_address": basetypes.StringType{}, + }), diags + } + + attributeTypes := map[string]attr.Type{ + "access_scope": basetypes.StringType{}, + "acl": basetypes.ListType{ + ElemType: types.StringType, + }, + "instance_address": basetypes.StringType{}, + "router_address": basetypes.StringType{}, + } + + if v.IsNull() { + return types.ObjectNull(attributeTypes), diags + } + + if v.IsUnknown() { + return types.ObjectUnknown(attributeTypes), diags + } + + objVal, diags := types.ObjectValue( + attributeTypes, + map[string]attr.Value{ + "access_scope": v.AccessScope, + "acl": aclVal, + "instance_address": v.InstanceAddress, + "router_address": v.RouterAddress, + }) + + return objVal, diags +} + +func (v NetworkValue) Equal(o attr.Value) bool { + other, ok := o.(NetworkValue) + + if !ok { + return false + } + + if v.state != other.state { + return false + } + + if v.state != attr.ValueStateKnown { + return true + } + + if !v.AccessScope.Equal(other.AccessScope) { + return false + } + + if !v.Acl.Equal(other.Acl) { + return false + } + + if !v.InstanceAddress.Equal(other.InstanceAddress) { + return false + } + + if !v.RouterAddress.Equal(other.RouterAddress) { + return false + } + + return true +} + +func (v NetworkValue) Type(ctx context.Context) attr.Type { + return NetworkType{ + basetypes.ObjectType{ + AttrTypes: v.AttributeTypes(ctx), + }, + } +} + +func (v NetworkValue) AttributeTypes(ctx context.Context) map[string]attr.Type { + return map[string]attr.Type{ + "access_scope": basetypes.StringType{}, + "acl": basetypes.ListType{ + ElemType: types.StringType, + }, + "instance_address": basetypes.StringType{}, + "router_address": basetypes.StringType{}, + } +} + +var _ basetypes.ObjectTypable = StorageType{} + +type StorageType struct { + basetypes.ObjectType +} + +func (t StorageType) Equal(o attr.Type) bool { + other, ok := o.(StorageType) + + if !ok { + return false + } + + return t.ObjectType.Equal(other.ObjectType) +} + +func (t StorageType) String() string { + return "StorageType" +} + +func (t StorageType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) { + var diags diag.Diagnostics + + attributes := in.Attributes() + + classAttribute, ok := attributes["class"] + + if !ok { + diags.AddError( + "Attribute Missing", + `class is missing from object`) + + return nil, diags + } + + classVal, ok := classAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`class expected to be basetypes.StringValue, was: %T`, classAttribute)) + } + + sizeAttribute, ok := attributes["size"] + + if !ok { + diags.AddError( + "Attribute Missing", + `size is missing from object`) + + return nil, diags + } + + sizeVal, ok := sizeAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute)) + } + + if diags.HasError() { + return nil, diags + } + + return StorageValue{ + Class: classVal, + Size: sizeVal, + state: attr.ValueStateKnown, + }, diags +} + +func NewStorageValueNull() StorageValue { + return StorageValue{ + state: attr.ValueStateNull, + } +} + +func NewStorageValueUnknown() StorageValue { + return StorageValue{ + state: attr.ValueStateUnknown, + } +} + +func NewStorageValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (StorageValue, diag.Diagnostics) { + var diags diag.Diagnostics + + // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521 + ctx := context.Background() + + for name, attributeType := range attributeTypes { + attribute, ok := attributes[name] + + if !ok { + diags.AddError( + "Missing StorageValue Attribute Value", + "While creating a StorageValue value, a missing attribute value was detected. "+ + "A StorageValue must contain values for all attributes, even if null or unknown. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("StorageValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()), + ) + + continue + } + + if !attributeType.Equal(attribute.Type(ctx)) { + diags.AddError( + "Invalid StorageValue Attribute Type", + "While creating a StorageValue value, an invalid attribute value was detected. "+ + "A StorageValue must use a matching attribute type for the value. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("StorageValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+ + fmt.Sprintf("StorageValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)), + ) + } + } + + for name := range attributes { + _, ok := attributeTypes[name] + + if !ok { + diags.AddError( + "Extra StorageValue Attribute Value", + "While creating a StorageValue value, an extra attribute value was detected. "+ + "A StorageValue must not contain values beyond the expected attribute types. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("Extra StorageValue Attribute Name: %s", name), + ) + } + } + + if diags.HasError() { + return NewStorageValueUnknown(), diags + } + + classAttribute, ok := attributes["class"] + + if !ok { + diags.AddError( + "Attribute Missing", + `class is missing from object`) + + return NewStorageValueUnknown(), diags + } + + classVal, ok := classAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`class expected to be basetypes.StringValue, was: %T`, classAttribute)) + } + + sizeAttribute, ok := attributes["size"] + + if !ok { + diags.AddError( + "Attribute Missing", + `size is missing from object`) + + return NewStorageValueUnknown(), diags + } + + sizeVal, ok := sizeAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute)) + } + + if diags.HasError() { + return NewStorageValueUnknown(), diags + } + + return StorageValue{ + Class: classVal, + Size: sizeVal, + state: attr.ValueStateKnown, + }, diags +} + +func NewStorageValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) StorageValue { + object, diags := NewStorageValue(attributeTypes, attributes) + + if diags.HasError() { + // This could potentially be added to the diag package. + diagsStrings := make([]string, 0, len(diags)) + + for _, diagnostic := range diags { + diagsStrings = append(diagsStrings, fmt.Sprintf( + "%s | %s | %s", + diagnostic.Severity(), + diagnostic.Summary(), + diagnostic.Detail())) + } + + panic("NewStorageValueMust received error(s): " + strings.Join(diagsStrings, "\n")) + } + + return object +} + +func (t StorageType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) { + if in.Type() == nil { + return NewStorageValueNull(), nil + } + + if !in.Type().Equal(t.TerraformType(ctx)) { + return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type()) + } + + if !in.IsKnown() { + return NewStorageValueUnknown(), nil + } + + if in.IsNull() { + return NewStorageValueNull(), nil + } + + attributes := map[string]attr.Value{} + + val := map[string]tftypes.Value{} + + err := in.As(&val) + + if err != nil { + return nil, err + } + + for k, v := range val { + a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v) + + if err != nil { + return nil, err + } + + attributes[k] = a + } + + return NewStorageValueMust(StorageValue{}.AttributeTypes(ctx), attributes), nil +} + +func (t StorageType) ValueType(ctx context.Context) attr.Value { + return StorageValue{} +} + +var _ basetypes.ObjectValuable = StorageValue{} + +type StorageValue struct { + Class basetypes.StringValue `tfsdk:"class"` + Size basetypes.Int64Value `tfsdk:"size"` + state attr.ValueState +} + +func (v StorageValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) { + attrTypes := make(map[string]tftypes.Type, 2) + + var val tftypes.Value + var err error + + attrTypes["class"] = basetypes.StringType{}.TerraformType(ctx) + attrTypes["size"] = basetypes.Int64Type{}.TerraformType(ctx) + + objectType := tftypes.Object{AttributeTypes: attrTypes} + + switch v.state { + case attr.ValueStateKnown: + vals := make(map[string]tftypes.Value, 2) + + val, err = v.Class.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["class"] = val + + val, err = v.Size.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["size"] = val + + if err := tftypes.ValidateValue(objectType, vals); err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + return tftypes.NewValue(objectType, vals), nil + case attr.ValueStateNull: + return tftypes.NewValue(objectType, nil), nil + case attr.ValueStateUnknown: + return tftypes.NewValue(objectType, tftypes.UnknownValue), nil + default: + panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state)) + } +} + +func (v StorageValue) IsNull() bool { + return v.state == attr.ValueStateNull +} + +func (v StorageValue) IsUnknown() bool { + return v.state == attr.ValueStateUnknown +} + +func (v StorageValue) String() string { + return "StorageValue" +} + +func (v StorageValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) { + var diags diag.Diagnostics + + attributeTypes := map[string]attr.Type{ + "class": basetypes.StringType{}, + "size": basetypes.Int64Type{}, + } + + if v.IsNull() { + return types.ObjectNull(attributeTypes), diags + } + + if v.IsUnknown() { + return types.ObjectUnknown(attributeTypes), diags + } + + objVal, diags := types.ObjectValue( + attributeTypes, + map[string]attr.Value{ + "class": v.Class, + "size": v.Size, + }) + + return objVal, diags +} + +func (v StorageValue) Equal(o attr.Value) bool { + other, ok := o.(StorageValue) + + if !ok { + return false + } + + if v.state != other.state { + return false + } + + if v.state != attr.ValueStateKnown { + return true + } + + if !v.Class.Equal(other.Class) { + return false + } + + if !v.Size.Equal(other.Size) { + return false + } + + return true +} + +func (v StorageValue) Type(ctx context.Context) attr.Type { + return StorageType{ + basetypes.ObjectType{ + AttrTypes: v.AttributeTypes(ctx), + }, + } +} + +func (v StorageValue) AttributeTypes(ctx context.Context) map[string]attr.Type { + return map[string]attr.Type{ + "class": basetypes.StringType{}, + "size": basetypes.Int64Type{}, + } +} diff --git a/stackit/internal/services/sqlserverflexbeta/user/datasource.go b/stackit/internal/services/sqlserverflexbeta/user/datasource.go new file mode 100644 index 00000000..985692a6 --- /dev/null +++ b/stackit/internal/services/sqlserverflexbeta/user/datasource.go @@ -0,0 +1,118 @@ +package sqlserverflexbeta + +import ( + "context" + "fmt" + "net/http" + + "github.com/hashicorp/terraform-plugin-framework/datasource" + "github.com/hashicorp/terraform-plugin-log/tflog" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" + + sqlserverflexbetaPkg "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta" + + sqlserverflexbetaUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/utils" + + sqlserverflexbetaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/user/datasources_gen" +) + +var _ datasource.DataSource = (*userDataSource)(nil) + +const errorPrefix = "[Sqlserverflexbeta - User]" + +func NewUserDataSource() datasource.DataSource { + return &userDataSource{} +} + +type userDataSource struct { + client *sqlserverflexbetaPkg.APIClient + providerData core.ProviderData +} + +func (d *userDataSource) Metadata(_ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) { + resp.TypeName = req.ProviderTypeName + "_sqlserverflexbeta_user" +} + +func (d *userDataSource) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) { + resp.Schema = sqlserverflexbetaGen.UserDataSourceSchema(ctx) +} + +// Configure adds the provider configured client to the data source. +func (d *userDataSource) Configure( + ctx context.Context, + req datasource.ConfigureRequest, + resp *datasource.ConfigureResponse, +) { + var ok bool + d.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics) + if !ok { + return + } + + apiClient := sqlserverflexbetaUtils.ConfigureClient(ctx, &d.providerData, &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } + d.client = apiClient + tflog.Info(ctx, fmt.Sprintf("%s client configured", errorPrefix)) +} + +func (d *userDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { + var data sqlserverflexbetaGen.UserModel + + // Read Terraform configuration data into the model + resp.Diagnostics.Append(req.Config.Get(ctx, &data)...) + + if resp.Diagnostics.HasError() { + return + } + + ctx = core.InitProviderContext(ctx) + + projectId := data.ProjectId.ValueString() + region := d.providerData.GetRegionWithOverride(data.Region) + userId := data.UserId.ValueString() + + ctx = tflog.SetField(ctx, "project_id", projectId) + ctx = tflog.SetField(ctx, "region", region) + ctx = tflog.SetField(ctx, "user_id", userId) + + userResp, err := d.client.GetUserRequest(ctx, projectId, region, userId).Execute() + if err != nil { + utils.LogError( + ctx, + &resp.Diagnostics, + err, + "Reading user", + fmt.Sprintf("user with ID %q does not exist in project %q.", userId, projectId), + map[int]string{ + http.StatusForbidden: fmt.Sprintf("Project with ID %q not found or forbidden access", projectId), + }, + ) + resp.State.RemoveResource(ctx) + return + } + + ctx = core.LogResponse(ctx) + + // Todo: Read API call logic + + // Example data value setting + // data.Id = types.StringValue("example-id") + + err = mapResponseToModel(ctx, userResp, &data, resp.Diagnostics) + if err != nil { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + fmt.Sprintf("%s Read", errorPrefix), + fmt.Sprintf("Processing API payload: %v", err), + ) + return + } + + // Save data into Terraform state + resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) +} diff --git a/stackit/internal/services/sqlserverflexbeta/user/datasources_gen/user_data_source_gen.go b/stackit/internal/services/sqlserverflexbeta/user/datasources_gen/user_data_source_gen.go new file mode 100644 index 00000000..1950c24e --- /dev/null +++ b/stackit/internal/services/sqlserverflexbeta/user/datasources_gen/user_data_source_gen.go @@ -0,0 +1,1118 @@ +// Code generated by terraform-plugin-framework-generator DO NOT EDIT. + +package sqlserverflexbeta + +import ( + "context" + "fmt" + "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator" + "github.com/hashicorp/terraform-plugin-framework/attr" + "github.com/hashicorp/terraform-plugin-framework/diag" + "github.com/hashicorp/terraform-plugin-framework/schema/validator" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/hashicorp/terraform-plugin-framework/types/basetypes" + "github.com/hashicorp/terraform-plugin-go/tftypes" + "strings" + + "github.com/hashicorp/terraform-plugin-framework/datasource/schema" +) + +func UserDataSourceSchema(ctx context.Context) schema.Schema { + return schema.Schema{ + Attributes: map[string]schema.Attribute{ + "instance_id": schema.StringAttribute{ + Required: true, + Description: "The ID of the instance.", + MarkdownDescription: "The ID of the instance.", + }, + "page": schema.Int64Attribute{ + Optional: true, + Computed: true, + Description: "Number of the page of items list to be returned.", + MarkdownDescription: "Number of the page of items list to be returned.", + }, + "pagination": schema.SingleNestedAttribute{ + Attributes: map[string]schema.Attribute{ + "page": schema.Int64Attribute{ + Computed: true, + }, + "size": schema.Int64Attribute{ + Computed: true, + }, + "sort": schema.StringAttribute{ + Computed: true, + }, + "total_pages": schema.Int64Attribute{ + Computed: true, + }, + "total_rows": schema.Int64Attribute{ + Computed: true, + }, + }, + CustomType: PaginationType{ + ObjectType: types.ObjectType{ + AttrTypes: PaginationValue{}.AttributeTypes(ctx), + }, + }, + Computed: true, + }, + "project_id": schema.StringAttribute{ + Required: true, + Description: "The STACKIT project ID.", + MarkdownDescription: "The STACKIT project ID.", + }, + "region": schema.StringAttribute{ + Required: true, + Description: "The region which should be addressed", + MarkdownDescription: "The region which should be addressed", + Validators: []validator.String{ + stringvalidator.OneOf( + "eu01", + ), + }, + }, + "size": schema.Int64Attribute{ + Optional: true, + Computed: true, + Description: "Number of items to be returned on each page.", + MarkdownDescription: "Number of items to be returned on each page.", + }, + "sort": schema.StringAttribute{ + Optional: true, + Computed: true, + Description: "Sorting of the users to be returned on each page.", + MarkdownDescription: "Sorting of the users to be returned on each page.", + Validators: []validator.String{ + stringvalidator.OneOf( + "id.asc", + "id.desc", + "index.desc", + "index.asc", + "name.desc", + "name.asc", + "status.desc", + "status.asc", + ), + }, + }, + "users": schema.ListNestedAttribute{ + NestedObject: schema.NestedAttributeObject{ + Attributes: map[string]schema.Attribute{ + "id": schema.Int64Attribute{ + Computed: true, + Description: "The ID of the user.", + MarkdownDescription: "The ID of the user.", + }, + "status": schema.StringAttribute{ + Computed: true, + Description: "The current status of the user.", + MarkdownDescription: "The current status of the user.", + }, + "username": schema.StringAttribute{ + Computed: true, + Description: "The name of the user.", + MarkdownDescription: "The name of the user.", + }, + }, + CustomType: UsersType{ + ObjectType: types.ObjectType{ + AttrTypes: UsersValue{}.AttributeTypes(ctx), + }, + }, + }, + Computed: true, + Description: "List of all users inside an instance", + MarkdownDescription: "List of all users inside an instance", + }, + }, + } +} + +type UserModel struct { + InstanceId types.String `tfsdk:"instance_id"` + Page types.Int64 `tfsdk:"page"` + Pagination PaginationValue `tfsdk:"pagination"` + ProjectId types.String `tfsdk:"project_id"` + Region types.String `tfsdk:"region"` + Size types.Int64 `tfsdk:"size"` + Sort types.String `tfsdk:"sort"` + Users types.List `tfsdk:"users"` +} + +var _ basetypes.ObjectTypable = PaginationType{} + +type PaginationType struct { + basetypes.ObjectType +} + +func (t PaginationType) Equal(o attr.Type) bool { + other, ok := o.(PaginationType) + + if !ok { + return false + } + + return t.ObjectType.Equal(other.ObjectType) +} + +func (t PaginationType) String() string { + return "PaginationType" +} + +func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) { + var diags diag.Diagnostics + + attributes := in.Attributes() + + pageAttribute, ok := attributes["page"] + + if !ok { + diags.AddError( + "Attribute Missing", + `page is missing from object`) + + return nil, diags + } + + pageVal, ok := pageAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute)) + } + + sizeAttribute, ok := attributes["size"] + + if !ok { + diags.AddError( + "Attribute Missing", + `size is missing from object`) + + return nil, diags + } + + sizeVal, ok := sizeAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute)) + } + + sortAttribute, ok := attributes["sort"] + + if !ok { + diags.AddError( + "Attribute Missing", + `sort is missing from object`) + + return nil, diags + } + + sortVal, ok := sortAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`sort expected to be basetypes.StringValue, was: %T`, sortAttribute)) + } + + totalPagesAttribute, ok := attributes["total_pages"] + + if !ok { + diags.AddError( + "Attribute Missing", + `total_pages is missing from object`) + + return nil, diags + } + + totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute)) + } + + totalRowsAttribute, ok := attributes["total_rows"] + + if !ok { + diags.AddError( + "Attribute Missing", + `total_rows is missing from object`) + + return nil, diags + } + + totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute)) + } + + if diags.HasError() { + return nil, diags + } + + return PaginationValue{ + Page: pageVal, + Size: sizeVal, + Sort: sortVal, + TotalPages: totalPagesVal, + TotalRows: totalRowsVal, + state: attr.ValueStateKnown, + }, diags +} + +func NewPaginationValueNull() PaginationValue { + return PaginationValue{ + state: attr.ValueStateNull, + } +} + +func NewPaginationValueUnknown() PaginationValue { + return PaginationValue{ + state: attr.ValueStateUnknown, + } +} + +func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (PaginationValue, diag.Diagnostics) { + var diags diag.Diagnostics + + // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521 + ctx := context.Background() + + for name, attributeType := range attributeTypes { + attribute, ok := attributes[name] + + if !ok { + diags.AddError( + "Missing PaginationValue Attribute Value", + "While creating a PaginationValue value, a missing attribute value was detected. "+ + "A PaginationValue must contain values for all attributes, even if null or unknown. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("PaginationValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()), + ) + + continue + } + + if !attributeType.Equal(attribute.Type(ctx)) { + diags.AddError( + "Invalid PaginationValue Attribute Type", + "While creating a PaginationValue value, an invalid attribute value was detected. "+ + "A PaginationValue must use a matching attribute type for the value. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("PaginationValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+ + fmt.Sprintf("PaginationValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)), + ) + } + } + + for name := range attributes { + _, ok := attributeTypes[name] + + if !ok { + diags.AddError( + "Extra PaginationValue Attribute Value", + "While creating a PaginationValue value, an extra attribute value was detected. "+ + "A PaginationValue must not contain values beyond the expected attribute types. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("Extra PaginationValue Attribute Name: %s", name), + ) + } + } + + if diags.HasError() { + return NewPaginationValueUnknown(), diags + } + + pageAttribute, ok := attributes["page"] + + if !ok { + diags.AddError( + "Attribute Missing", + `page is missing from object`) + + return NewPaginationValueUnknown(), diags + } + + pageVal, ok := pageAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute)) + } + + sizeAttribute, ok := attributes["size"] + + if !ok { + diags.AddError( + "Attribute Missing", + `size is missing from object`) + + return NewPaginationValueUnknown(), diags + } + + sizeVal, ok := sizeAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute)) + } + + sortAttribute, ok := attributes["sort"] + + if !ok { + diags.AddError( + "Attribute Missing", + `sort is missing from object`) + + return NewPaginationValueUnknown(), diags + } + + sortVal, ok := sortAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`sort expected to be basetypes.StringValue, was: %T`, sortAttribute)) + } + + totalPagesAttribute, ok := attributes["total_pages"] + + if !ok { + diags.AddError( + "Attribute Missing", + `total_pages is missing from object`) + + return NewPaginationValueUnknown(), diags + } + + totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute)) + } + + totalRowsAttribute, ok := attributes["total_rows"] + + if !ok { + diags.AddError( + "Attribute Missing", + `total_rows is missing from object`) + + return NewPaginationValueUnknown(), diags + } + + totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute)) + } + + if diags.HasError() { + return NewPaginationValueUnknown(), diags + } + + return PaginationValue{ + Page: pageVal, + Size: sizeVal, + Sort: sortVal, + TotalPages: totalPagesVal, + TotalRows: totalRowsVal, + state: attr.ValueStateKnown, + }, diags +} + +func NewPaginationValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) PaginationValue { + object, diags := NewPaginationValue(attributeTypes, attributes) + + if diags.HasError() { + // This could potentially be added to the diag package. + diagsStrings := make([]string, 0, len(diags)) + + for _, diagnostic := range diags { + diagsStrings = append(diagsStrings, fmt.Sprintf( + "%s | %s | %s", + diagnostic.Severity(), + diagnostic.Summary(), + diagnostic.Detail())) + } + + panic("NewPaginationValueMust received error(s): " + strings.Join(diagsStrings, "\n")) + } + + return object +} + +func (t PaginationType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) { + if in.Type() == nil { + return NewPaginationValueNull(), nil + } + + if !in.Type().Equal(t.TerraformType(ctx)) { + return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type()) + } + + if !in.IsKnown() { + return NewPaginationValueUnknown(), nil + } + + if in.IsNull() { + return NewPaginationValueNull(), nil + } + + attributes := map[string]attr.Value{} + + val := map[string]tftypes.Value{} + + err := in.As(&val) + + if err != nil { + return nil, err + } + + for k, v := range val { + a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v) + + if err != nil { + return nil, err + } + + attributes[k] = a + } + + return NewPaginationValueMust(PaginationValue{}.AttributeTypes(ctx), attributes), nil +} + +func (t PaginationType) ValueType(ctx context.Context) attr.Value { + return PaginationValue{} +} + +var _ basetypes.ObjectValuable = PaginationValue{} + +type PaginationValue struct { + Page basetypes.Int64Value `tfsdk:"page"` + Size basetypes.Int64Value `tfsdk:"size"` + Sort basetypes.StringValue `tfsdk:"sort"` + TotalPages basetypes.Int64Value `tfsdk:"total_pages"` + TotalRows basetypes.Int64Value `tfsdk:"total_rows"` + state attr.ValueState +} + +func (v PaginationValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) { + attrTypes := make(map[string]tftypes.Type, 5) + + var val tftypes.Value + var err error + + attrTypes["page"] = basetypes.Int64Type{}.TerraformType(ctx) + attrTypes["size"] = basetypes.Int64Type{}.TerraformType(ctx) + attrTypes["sort"] = basetypes.StringType{}.TerraformType(ctx) + attrTypes["total_pages"] = basetypes.Int64Type{}.TerraformType(ctx) + attrTypes["total_rows"] = basetypes.Int64Type{}.TerraformType(ctx) + + objectType := tftypes.Object{AttributeTypes: attrTypes} + + switch v.state { + case attr.ValueStateKnown: + vals := make(map[string]tftypes.Value, 5) + + val, err = v.Page.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["page"] = val + + val, err = v.Size.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["size"] = val + + val, err = v.Sort.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["sort"] = val + + val, err = v.TotalPages.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["total_pages"] = val + + val, err = v.TotalRows.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["total_rows"] = val + + if err := tftypes.ValidateValue(objectType, vals); err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + return tftypes.NewValue(objectType, vals), nil + case attr.ValueStateNull: + return tftypes.NewValue(objectType, nil), nil + case attr.ValueStateUnknown: + return tftypes.NewValue(objectType, tftypes.UnknownValue), nil + default: + panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state)) + } +} + +func (v PaginationValue) IsNull() bool { + return v.state == attr.ValueStateNull +} + +func (v PaginationValue) IsUnknown() bool { + return v.state == attr.ValueStateUnknown +} + +func (v PaginationValue) String() string { + return "PaginationValue" +} + +func (v PaginationValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) { + var diags diag.Diagnostics + + attributeTypes := map[string]attr.Type{ + "page": basetypes.Int64Type{}, + "size": basetypes.Int64Type{}, + "sort": basetypes.StringType{}, + "total_pages": basetypes.Int64Type{}, + "total_rows": basetypes.Int64Type{}, + } + + if v.IsNull() { + return types.ObjectNull(attributeTypes), diags + } + + if v.IsUnknown() { + return types.ObjectUnknown(attributeTypes), diags + } + + objVal, diags := types.ObjectValue( + attributeTypes, + map[string]attr.Value{ + "page": v.Page, + "size": v.Size, + "sort": v.Sort, + "total_pages": v.TotalPages, + "total_rows": v.TotalRows, + }) + + return objVal, diags +} + +func (v PaginationValue) Equal(o attr.Value) bool { + other, ok := o.(PaginationValue) + + if !ok { + return false + } + + if v.state != other.state { + return false + } + + if v.state != attr.ValueStateKnown { + return true + } + + if !v.Page.Equal(other.Page) { + return false + } + + if !v.Size.Equal(other.Size) { + return false + } + + if !v.Sort.Equal(other.Sort) { + return false + } + + if !v.TotalPages.Equal(other.TotalPages) { + return false + } + + if !v.TotalRows.Equal(other.TotalRows) { + return false + } + + return true +} + +func (v PaginationValue) Type(ctx context.Context) attr.Type { + return PaginationType{ + basetypes.ObjectType{ + AttrTypes: v.AttributeTypes(ctx), + }, + } +} + +func (v PaginationValue) AttributeTypes(ctx context.Context) map[string]attr.Type { + return map[string]attr.Type{ + "page": basetypes.Int64Type{}, + "size": basetypes.Int64Type{}, + "sort": basetypes.StringType{}, + "total_pages": basetypes.Int64Type{}, + "total_rows": basetypes.Int64Type{}, + } +} + +var _ basetypes.ObjectTypable = UsersType{} + +type UsersType struct { + basetypes.ObjectType +} + +func (t UsersType) Equal(o attr.Type) bool { + other, ok := o.(UsersType) + + if !ok { + return false + } + + return t.ObjectType.Equal(other.ObjectType) +} + +func (t UsersType) String() string { + return "UsersType" +} + +func (t UsersType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) { + var diags diag.Diagnostics + + attributes := in.Attributes() + + idAttribute, ok := attributes["id"] + + if !ok { + diags.AddError( + "Attribute Missing", + `id is missing from object`) + + return nil, diags + } + + idVal, ok := idAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`id expected to be basetypes.Int64Value, was: %T`, idAttribute)) + } + + statusAttribute, ok := attributes["status"] + + if !ok { + diags.AddError( + "Attribute Missing", + `status is missing from object`) + + return nil, diags + } + + statusVal, ok := statusAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`status expected to be basetypes.StringValue, was: %T`, statusAttribute)) + } + + usernameAttribute, ok := attributes["username"] + + if !ok { + diags.AddError( + "Attribute Missing", + `username is missing from object`) + + return nil, diags + } + + usernameVal, ok := usernameAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`username expected to be basetypes.StringValue, was: %T`, usernameAttribute)) + } + + if diags.HasError() { + return nil, diags + } + + return UsersValue{ + Id: idVal, + Status: statusVal, + Username: usernameVal, + state: attr.ValueStateKnown, + }, diags +} + +func NewUsersValueNull() UsersValue { + return UsersValue{ + state: attr.ValueStateNull, + } +} + +func NewUsersValueUnknown() UsersValue { + return UsersValue{ + state: attr.ValueStateUnknown, + } +} + +func NewUsersValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (UsersValue, diag.Diagnostics) { + var diags diag.Diagnostics + + // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521 + ctx := context.Background() + + for name, attributeType := range attributeTypes { + attribute, ok := attributes[name] + + if !ok { + diags.AddError( + "Missing UsersValue Attribute Value", + "While creating a UsersValue value, a missing attribute value was detected. "+ + "A UsersValue must contain values for all attributes, even if null or unknown. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("UsersValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()), + ) + + continue + } + + if !attributeType.Equal(attribute.Type(ctx)) { + diags.AddError( + "Invalid UsersValue Attribute Type", + "While creating a UsersValue value, an invalid attribute value was detected. "+ + "A UsersValue must use a matching attribute type for the value. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("UsersValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+ + fmt.Sprintf("UsersValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)), + ) + } + } + + for name := range attributes { + _, ok := attributeTypes[name] + + if !ok { + diags.AddError( + "Extra UsersValue Attribute Value", + "While creating a UsersValue value, an extra attribute value was detected. "+ + "A UsersValue must not contain values beyond the expected attribute types. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("Extra UsersValue Attribute Name: %s", name), + ) + } + } + + if diags.HasError() { + return NewUsersValueUnknown(), diags + } + + idAttribute, ok := attributes["id"] + + if !ok { + diags.AddError( + "Attribute Missing", + `id is missing from object`) + + return NewUsersValueUnknown(), diags + } + + idVal, ok := idAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`id expected to be basetypes.Int64Value, was: %T`, idAttribute)) + } + + statusAttribute, ok := attributes["status"] + + if !ok { + diags.AddError( + "Attribute Missing", + `status is missing from object`) + + return NewUsersValueUnknown(), diags + } + + statusVal, ok := statusAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`status expected to be basetypes.StringValue, was: %T`, statusAttribute)) + } + + usernameAttribute, ok := attributes["username"] + + if !ok { + diags.AddError( + "Attribute Missing", + `username is missing from object`) + + return NewUsersValueUnknown(), diags + } + + usernameVal, ok := usernameAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`username expected to be basetypes.StringValue, was: %T`, usernameAttribute)) + } + + if diags.HasError() { + return NewUsersValueUnknown(), diags + } + + return UsersValue{ + Id: idVal, + Status: statusVal, + Username: usernameVal, + state: attr.ValueStateKnown, + }, diags +} + +func NewUsersValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) UsersValue { + object, diags := NewUsersValue(attributeTypes, attributes) + + if diags.HasError() { + // This could potentially be added to the diag package. + diagsStrings := make([]string, 0, len(diags)) + + for _, diagnostic := range diags { + diagsStrings = append(diagsStrings, fmt.Sprintf( + "%s | %s | %s", + diagnostic.Severity(), + diagnostic.Summary(), + diagnostic.Detail())) + } + + panic("NewUsersValueMust received error(s): " + strings.Join(diagsStrings, "\n")) + } + + return object +} + +func (t UsersType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) { + if in.Type() == nil { + return NewUsersValueNull(), nil + } + + if !in.Type().Equal(t.TerraformType(ctx)) { + return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type()) + } + + if !in.IsKnown() { + return NewUsersValueUnknown(), nil + } + + if in.IsNull() { + return NewUsersValueNull(), nil + } + + attributes := map[string]attr.Value{} + + val := map[string]tftypes.Value{} + + err := in.As(&val) + + if err != nil { + return nil, err + } + + for k, v := range val { + a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v) + + if err != nil { + return nil, err + } + + attributes[k] = a + } + + return NewUsersValueMust(UsersValue{}.AttributeTypes(ctx), attributes), nil +} + +func (t UsersType) ValueType(ctx context.Context) attr.Value { + return UsersValue{} +} + +var _ basetypes.ObjectValuable = UsersValue{} + +type UsersValue struct { + Id basetypes.Int64Value `tfsdk:"id"` + Status basetypes.StringValue `tfsdk:"status"` + Username basetypes.StringValue `tfsdk:"username"` + state attr.ValueState +} + +func (v UsersValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) { + attrTypes := make(map[string]tftypes.Type, 3) + + var val tftypes.Value + var err error + + attrTypes["id"] = basetypes.Int64Type{}.TerraformType(ctx) + attrTypes["status"] = basetypes.StringType{}.TerraformType(ctx) + attrTypes["username"] = basetypes.StringType{}.TerraformType(ctx) + + objectType := tftypes.Object{AttributeTypes: attrTypes} + + switch v.state { + case attr.ValueStateKnown: + vals := make(map[string]tftypes.Value, 3) + + val, err = v.Id.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["id"] = val + + val, err = v.Status.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["status"] = val + + val, err = v.Username.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["username"] = val + + if err := tftypes.ValidateValue(objectType, vals); err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + return tftypes.NewValue(objectType, vals), nil + case attr.ValueStateNull: + return tftypes.NewValue(objectType, nil), nil + case attr.ValueStateUnknown: + return tftypes.NewValue(objectType, tftypes.UnknownValue), nil + default: + panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state)) + } +} + +func (v UsersValue) IsNull() bool { + return v.state == attr.ValueStateNull +} + +func (v UsersValue) IsUnknown() bool { + return v.state == attr.ValueStateUnknown +} + +func (v UsersValue) String() string { + return "UsersValue" +} + +func (v UsersValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) { + var diags diag.Diagnostics + + attributeTypes := map[string]attr.Type{ + "id": basetypes.Int64Type{}, + "status": basetypes.StringType{}, + "username": basetypes.StringType{}, + } + + if v.IsNull() { + return types.ObjectNull(attributeTypes), diags + } + + if v.IsUnknown() { + return types.ObjectUnknown(attributeTypes), diags + } + + objVal, diags := types.ObjectValue( + attributeTypes, + map[string]attr.Value{ + "id": v.Id, + "status": v.Status, + "username": v.Username, + }) + + return objVal, diags +} + +func (v UsersValue) Equal(o attr.Value) bool { + other, ok := o.(UsersValue) + + if !ok { + return false + } + + if v.state != other.state { + return false + } + + if v.state != attr.ValueStateKnown { + return true + } + + if !v.Id.Equal(other.Id) { + return false + } + + if !v.Status.Equal(other.Status) { + return false + } + + if !v.Username.Equal(other.Username) { + return false + } + + return true +} + +func (v UsersValue) Type(ctx context.Context) attr.Type { + return UsersType{ + basetypes.ObjectType{ + AttrTypes: v.AttributeTypes(ctx), + }, + } +} + +func (v UsersValue) AttributeTypes(ctx context.Context) map[string]attr.Type { + return map[string]attr.Type{ + "id": basetypes.Int64Type{}, + "status": basetypes.StringType{}, + "username": basetypes.StringType{}, + } +} diff --git a/stackit/internal/services/sqlserverflexbeta/user/functions.go b/stackit/internal/services/sqlserverflexbeta/user/functions.go new file mode 100644 index 00000000..b565f761 --- /dev/null +++ b/stackit/internal/services/sqlserverflexbeta/user/functions.go @@ -0,0 +1,98 @@ +package sqlserverflexbeta + +import ( + "context" + "fmt" + "math" + + "github.com/hashicorp/terraform-plugin-framework/attr" + "github.com/hashicorp/terraform-plugin-framework/diag" + "github.com/hashicorp/terraform-plugin-framework/resource" + "github.com/hashicorp/terraform-plugin-framework/types" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" + + sqlserverflexbeta "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta" + sqlserverflexbetaResGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/instance/resources_gen" +) + +func mapResponseToModel( + ctx context.Context, + resp *sqlserverflexbeta.GetUserResponse, + m *sqlserverflexbetaResGen.UserModel, + tfDiags diag.Diagnostics, +) error { + // TODO: complete and refactor + m.Id = types.StringValue(resp.GetId()) + + /* + sampleList, diags := types.ListValueFrom(ctx, types.StringType, resp.GetList()) + tfDiags.Append(diags...) + if diags.HasError() { + return fmt.Errorf( + "error converting list response value", + ) + } + sample, diags := sqlserverflexbetaResGen.NewSampleValue( + sqlserverflexbetaResGen.SampleValue{}.AttributeTypes(ctx), + map[string]attr.Value{ + "field": types.StringValue(string(resp.GetField())), + }, + ) + tfDiags.Append(diags...) + if diags.HasError() { + return fmt.Errorf( + "error converting sample response value", + "sample", + types.StringValue(string(resp.GetField())), + ) + } + m.Sample = sample + */ + return nil +} + +func handleEncryption( + m *sqlserverflexbetaResGen.UserModel, + resp *sqlserverflexbeta.GetUserResponse, +) sqlserverflexbetaResGen.EncryptionValue { + if !resp.HasEncryption() || + resp.Encryption == nil || + resp.Encryption.KekKeyId == nil || + resp.Encryption.KekKeyRingId == nil || + resp.Encryption.KekKeyVersion == nil || + resp.Encryption.ServiceAccount == nil { + + if m.Encryption.IsNull() || m.Encryption.IsUnknown() { + return sqlserverflexbetaResGen.NewEncryptionValueNull() + } + return m.Encryption + } + + enc := sqlserverflexbetaResGen.NewEncryptionValueNull() + if kVal, ok := resp.Encryption.GetKekKeyIdOk(); ok { + enc.KekKeyId = types.StringValue(kVal) + } + if kkVal, ok := resp.Encryption.GetKekKeyRingIdOk(); ok { + enc.KekKeyRingId = types.StringValue(kkVal) + } + if kkvVal, ok := resp.Encryption.GetKekKeyVersionOk(); ok { + enc.KekKeyVersion = types.StringValue(kkvVal) + } + if sa, ok := resp.Encryption.GetServiceAccountOk(); ok { + enc.ServiceAccount = types.StringValue(sa) + } + return enc +} + +func toCreatePayload( + ctx context.Context, + model *sqlserverflexbetaResGen.UserModel, +) (*sqlserverflexbeta.CreateUserRequestPayload, error) { + if model == nil { + return nil, fmt.Errorf("nil model") + } + + return &sqlserverflexbeta.CreateUserRequestPayload{ + // TODO: fill fields + }, nil +} diff --git a/stackit/internal/services/sqlserverflexbeta/user/resource.go b/stackit/internal/services/sqlserverflexbeta/user/resource.go new file mode 100644 index 00000000..e2692e13 --- /dev/null +++ b/stackit/internal/services/sqlserverflexbeta/user/resource.go @@ -0,0 +1,411 @@ +package sqlserverflexbeta + +import ( + "context" + "fmt" + "strings" + + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/resource" + "github.com/hashicorp/terraform-plugin-framework/resource/identityschema" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/hashicorp/terraform-plugin-log/tflog" + "github.com/stackitcloud/stackit-sdk-go/core/config" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" + + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta" + + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" + + sqlserverflexbetaResGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/user/resources_gen" +) + +var ( + _ resource.Resource = &userResource{} + _ resource.ResourceWithConfigure = &userResource{} + _ resource.ResourceWithImportState = &userResource{} + _ resource.ResourceWithModifyPlan = &userResource{} + _ resource.ResourceWithIdentity = &userResource{} +) + +func NewUserResource() resource.Resource { + return &userResource{} +} + +type userResource struct { + client *sqlserverflexbeta.APIClient + providerData core.ProviderData +} + +type UserResourceIdentityModel struct { + ProjectID types.String `tfsdk:"project_id"` + Region types.String `tfsdk:"region"` + UserID types.String `tfsdk:"instance_id"` + // TODO: implement further needed parts +} + +func (r *userResource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) { + resp.TypeName = req.ProviderTypeName + "_sqlserverflexbeta_user" +} + +func (r *userResource) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) { + resp.Schema = sqlserverflexbetaResGen.UserResourceSchema(ctx) +} + +func (r *instanceResource) IdentitySchema(_ context.Context, _ resource.IdentitySchemaRequest, resp *resource.IdentitySchemaResponse) { + resp.IdentitySchema = identityschema.Schema{ + Attributes: map[string]identityschema.Attribute{ + "project_id": identityschema.StringAttribute{ + RequiredForImport: true, // must be set during import by the practitioner + }, + "region": identityschema.StringAttribute{ + RequiredForImport: true, // can be defaulted by the provider configuration + }, + "instance_id": identityschema.StringAttribute{ + RequiredForImport: true, // can be defaulted by the provider configuration + }, + }, + } +} + +// Configure adds the provider configured client to the resource. +func (r *userResource) Configure( + ctx context.Context, + req resource.ConfigureRequest, + resp *resource.ConfigureResponse, +) { + var ok bool + r.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics) + if !ok { + return + } + + apiClientConfigOptions := []config.ConfigurationOption{ + config.WithCustomAuth(r.providerData.RoundTripper), + utils.UserAgentConfigOption(r.providerData.Version), + } + if r.providerData.SqlserverflexbetaCustomEndpoint != "" { + apiClientConfigOptions = append(apiClientConfigOptions, config.WithEndpoint(r.providerData.sqlserverflexbetaCustomEndpoint)) + } else { + apiClientConfigOptions = append(apiClientConfigOptions, config.WithRegion(r.providerData.GetRegion())) + } + apiClient, err := sqlserverflexbeta.NewAPIClient(apiClientConfigOptions...) + if err != nil { + resp.Diagnostics.AddError( + "Error configuring API client", + fmt.Sprintf( + "Configuring client: %v. This is an error related to the provider configuration, not to the resource configuration", + err, + ), + ) + return + } + r.client = apiClient + tflog.Info(ctx, "sqlserverflexbeta.User client configured") +} + +func (r *userResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) { + var data sqlserverflexbetaResGen.UserModel + + // Read Terraform plan data into the model + resp.Diagnostics.Append(req.Plan.Get(ctx, &data)...) + + if resp.Diagnostics.HasError() { + return + } + + // Read identity data + var identityData UserResourceIdentityModel + resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...) + if resp.Diagnostics.HasError() { + return + } + + ctx = core.InitProviderContext(ctx) + + projectId := identityData.ProjectID.ValueString() + region := identityData.Region.ValueString() + ctx = tflog.SetField(ctx, "project_id", projectId) + ctx = tflog.SetField(ctx, "region", region) + + // TODO: Create API call logic + /* + // Generate API request body from model + payload, err := toCreatePayload(ctx, &model) + if err != nil { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + "Error creating User", + fmt.Sprintf("Creating API payload: %v", err), + ) + return + } + // Create new User + createResp, err := r.client.CreateUserRequest( + ctx, + projectId, + region, + ).CreateUserRequestPayload(*payload).Execute() + if err != nil { + core.LogAndAddError(ctx, &resp.Diagnostics, "Error creating User", fmt.Sprintf("Calling API: %v", err)) + return + } + + ctx = core.LogResponse(ctx) + + UserId := *createResp.Id + */ + + // Example data value setting + data.UserId = types.StringValue("id-from-response") + + // TODO: Set data returned by API in identity + identity := UserResourceIdentityModel{ + ProjectID: types.StringValue(projectId), + Region: types.StringValue(region), + // TODO: add missing values + UserID: types.StringValue(UserId), + } + resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...) + if resp.Diagnostics.HasError() { + return + } + + // TODO: implement wait handler if needed + /* + + waitResp, err := wait.CreateUserWaitHandler( + ctx, + r.client, + projectId, + UserId, + region, + ).SetSleepBeforeWait( + 30 * time.Second, + ).SetTimeout( + 90 * time.Minute, + ).WaitWithContext(ctx) + if err != nil { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + "Error creating User", + fmt.Sprintf("User creation waiting: %v", err), + ) + return + } + + if waitResp.Id == nil { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + "Error creating User", + "User creation waiting: returned id is nil", + ) + return + } + + // Map response body to schema + err = mapResponseToModel(ctx, waitResp, &model, resp.Diagnostics) + if err != nil { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + "Error creating User", + fmt.Sprintf("Processing API payload: %v", err), + ) + return + } + + */ + + // Save data into Terraform state + resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) + + tflog.Info(ctx, "sqlserverflexbeta.User created") +} + +func (r *userResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) { + var data sqlserverflexbetaResGen.UserModel + + // Read Terraform prior state data into the model + resp.Diagnostics.Append(req.State.Get(ctx, &data)...) + if resp.Diagnostics.HasError() { + return + } + + // Read identity data + var identityData UserResourceIdentityModel + resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...) + if resp.Diagnostics.HasError() { + return + } + + ctx = core.InitProviderContext(ctx) + + projectId := identityData.ProjectID.ValueString() + region := identityData.Region.ValueString() + ctx = tflog.SetField(ctx, "project_id", projectId) + ctx = tflog.SetField(ctx, "region", region) + + // Todo: Read API call logic + + // Save updated data into Terraform state + resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) + + // TODO: Set data returned by API in identity + identity := UserResourceIdentityModel{ + ProjectID: types.StringValue(projectId), + Region: types.StringValue(region), + // InstanceID: types.StringValue(instanceId), + } + resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...) + if resp.Diagnostics.HasError() { + return + } + + tflog.Info(ctx, "sqlserverflexbeta.User read") +} + +func (r *userResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) { + var data sqlserverflexbetaResGen.UserModel + + // Read Terraform prior state data into the model + resp.Diagnostics.Append(req.State.Get(ctx, &data)...) + if resp.Diagnostics.HasError() { + return + } + + // Read identity data + var identityData UserResourceIdentityModel + resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...) + if resp.Diagnostics.HasError() { + return + } + + ctx = core.InitProviderContext(ctx) + + projectId := identityData.ProjectID.ValueString() + region := identityData.Region.ValueString() + ctx = tflog.SetField(ctx, "project_id", projectId) + ctx = tflog.SetField(ctx, "region", region) + + // Todo: Update API call logic + + // Save updated data into Terraform state + resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) + + tflog.Info(ctx, "sqlserverflexbeta.User updated") +} + +func (r *userResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) { + var data sqlserverflexbetaResGen.UserModel + + // Read Terraform prior state data into the model + resp.Diagnostics.Append(req.State.Get(ctx, &data)...) + if resp.Diagnostics.HasError() { + return + } + + // Read identity data + var identityData UserResourceIdentityModel + resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...) + if resp.Diagnostics.HasError() { + return + } + + ctx = core.InitProviderContext(ctx) + + projectId := identityData.ProjectID.ValueString() + region := identityData.Region.ValueString() + ctx = tflog.SetField(ctx, "project_id", projectId) + ctx = tflog.SetField(ctx, "region", region) + + // Todo: Delete API call logic + + tflog.Info(ctx, "sqlserverflexbeta.User deleted") +} + +// ModifyPlan implements resource.ResourceWithModifyPlan. +// Use the modifier to set the effective region in the current plan. +func (r *userResource) ModifyPlan( + ctx context.Context, + req resource.ModifyPlanRequest, + resp *resource.ModifyPlanResponse, +) { // nolint:gocritic // function signature required by Terraform + var configModel sqlserverflexbetaResGen.UserModel + // skip initial empty configuration to avoid follow-up errors + if req.Config.Raw.IsNull() { + return + } + resp.Diagnostics.Append(req.Config.Get(ctx, &configModel)...) + if resp.Diagnostics.HasError() { + return + } + + var planModel sqlserverflexbetaResGen.UserModel + resp.Diagnostics.Append(req.Plan.Get(ctx, &planModel)...) + if resp.Diagnostics.HasError() { + return + } + + utils.AdaptRegion(ctx, configModel.Region, &planModel.Region, r.providerData.GetRegion(), resp) + if resp.Diagnostics.HasError() { + return + } + + var identityModel UserResourceIdentityModel + identityModel.ProjectID = planModel.ProjectId + identityModel.Region = planModel.Region + // TODO: complete + //if !planModel.InstanceId.IsNull() && !planModel.InstanceId.IsUnknown() { + // identityModel.InstanceID = planModel.InstanceId + //} + + resp.Diagnostics.Append(resp.Identity.Set(ctx, identityModel)...) + if resp.Diagnostics.HasError() { + return + } + + resp.Diagnostics.Append(resp.Plan.Set(ctx, planModel)...) + if resp.Diagnostics.HasError() { + return + } +} + +// ImportState imports a resource into the Terraform state on success. +// The expected format of the resource import identifier is: project_id,zone_id,record_set_id +func (r *userResource) ImportState( + ctx context.Context, + req resource.ImportStateRequest, + resp *resource.ImportStateResponse, +) { + idParts := strings.Split(req.ID, core.Separator) + + // Todo: Import logic + if len(idParts) < 2 || idParts[0] == "" || idParts[1] == "" { + core.LogAndAddError( + ctx, &resp.Diagnostics, + "Error importing database", + fmt.Sprintf( + "Expected import identifier with format [project_id],[region],..., got %q", + req.ID, + ), + ) + return + } + + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), idParts[0])...) + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), idParts[1])...) + // ... more ... + + core.LogAndAddWarning( + ctx, + &resp.Diagnostics, + "Sqlserverflexbeta database imported with empty password", + "The database password is not imported as it is only available upon creation of a new database. The password field will be empty.", + ) + tflog.Info(ctx, "Sqlserverflexbeta user state imported") +} diff --git a/stackit/internal/services/sqlserverflexbeta/user/resources_gen/user_resource_gen.go b/stackit/internal/services/sqlserverflexbeta/user/resources_gen/user_resource_gen.go new file mode 100644 index 00000000..3bf7f4fc --- /dev/null +++ b/stackit/internal/services/sqlserverflexbeta/user/resources_gen/user_resource_gen.go @@ -0,0 +1,111 @@ +// Code generated by terraform-plugin-framework-generator DO NOT EDIT. + +package sqlserverflexbeta + +import ( + "context" + "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator" + "github.com/hashicorp/terraform-plugin-framework/schema/validator" + "github.com/hashicorp/terraform-plugin-framework/types" + + "github.com/hashicorp/terraform-plugin-framework/resource/schema" +) + +func UserResourceSchema(ctx context.Context) schema.Schema { + return schema.Schema{ + Attributes: map[string]schema.Attribute{ + "default_database": schema.StringAttribute{ + Optional: true, + Computed: true, + Description: "The default database for a user of the instance.", + MarkdownDescription: "The default database for a user of the instance.", + }, + "host": schema.StringAttribute{ + Computed: true, + Description: "The host of the instance in which the user belongs to.", + MarkdownDescription: "The host of the instance in which the user belongs to.", + }, + "id": schema.Int64Attribute{ + Computed: true, + Description: "The ID of the user.", + MarkdownDescription: "The ID of the user.", + }, + "instance_id": schema.StringAttribute{ + Optional: true, + Computed: true, + Description: "The ID of the instance.", + MarkdownDescription: "The ID of the instance.", + }, + "password": schema.StringAttribute{ + Computed: true, + Description: "The password for the user.", + MarkdownDescription: "The password for the user.", + }, + "port": schema.Int64Attribute{ + Computed: true, + Description: "The port of the instance in which the user belongs to.", + MarkdownDescription: "The port of the instance in which the user belongs to.", + }, + "project_id": schema.StringAttribute{ + Optional: true, + Computed: true, + Description: "The STACKIT project ID.", + MarkdownDescription: "The STACKIT project ID.", + }, + "region": schema.StringAttribute{ + Optional: true, + Computed: true, + Description: "The region which should be addressed", + MarkdownDescription: "The region which should be addressed", + Validators: []validator.String{ + stringvalidator.OneOf( + "eu01", + ), + }, + }, + "roles": schema.ListAttribute{ + ElementType: types.StringType, + Required: true, + Description: "A list containing the user roles for the instance.", + MarkdownDescription: "A list containing the user roles for the instance.", + }, + "status": schema.StringAttribute{ + Computed: true, + Description: "The current status of the user.", + MarkdownDescription: "The current status of the user.", + }, + "uri": schema.StringAttribute{ + Computed: true, + Description: "The connection string for the user to the instance.", + MarkdownDescription: "The connection string for the user to the instance.", + }, + "user_id": schema.Int64Attribute{ + Optional: true, + Computed: true, + Description: "The ID of the user.", + MarkdownDescription: "The ID of the user.", + }, + "username": schema.StringAttribute{ + Required: true, + Description: "The name of the user.", + MarkdownDescription: "The name of the user.", + }, + }, + } +} + +type UserModel struct { + DefaultDatabase types.String `tfsdk:"default_database"` + Host types.String `tfsdk:"host"` + Id types.Int64 `tfsdk:"id"` + InstanceId types.String `tfsdk:"instance_id"` + Password types.String `tfsdk:"password"` + Port types.Int64 `tfsdk:"port"` + ProjectId types.String `tfsdk:"project_id"` + Region types.String `tfsdk:"region"` + Roles types.List `tfsdk:"roles"` + Status types.String `tfsdk:"status"` + Uri types.String `tfsdk:"uri"` + UserId types.Int64 `tfsdk:"user_id"` + Username types.String `tfsdk:"username"` +} diff --git a/stackit/internal/wait/sqlserverflexbeta/wait.go b/stackit/internal/wait/sqlserverflexbeta/wait.go new file mode 100644 index 00000000..bd813d82 --- /dev/null +++ b/stackit/internal/wait/sqlserverflexbeta/wait.go @@ -0,0 +1,159 @@ +package sqlserverflexbeta + +import ( + "context" + "errors" + "fmt" + "net/http" + "strings" + "time" + + "github.com/hashicorp/terraform-plugin-log/tflog" + "github.com/stackitcloud/stackit-sdk-go/core/oapierror" + "github.com/stackitcloud/stackit-sdk-go/core/wait" + sqlserverflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta" +) + +// READY, PENDING, PROGRESSING, FAILURE, UNKNOWN, +const ( + InstanceStateEmpty = "" + InstanceStateSuccess = "READY" + InstanceStatePending = "PENDING" + InstanceStateProcessing = "PROGRESSING" + InstanceStateFailed = "FAILURE" + InstanceStateUnknown = "UNKNOWN" + InstanceStateTerminating = "TERMINATING" +) + +// APIClientInterface Interface needed for tests +type APIClientInterface interface { + GetInstanceRequestExecute(ctx context.Context, projectId, region, instanceId string) (*sqlserverflex.GetInstanceResponse, error) + GetDatabaseRequestExecute(ctx context.Context, projectId string, region string, instanceId string, databaseName string) (*sqlserverflex.GetDatabaseResponse, error) + GetUserRequestExecute(ctx context.Context, projectId string, region string, instanceId string, userId int64) (*sqlserverflex.GetUserResponse, error) +} + +// CreateInstanceWaitHandler will wait for instance creation +func CreateInstanceWaitHandler( + ctx context.Context, + a APIClientInterface, + projectId, instanceId, region string, +) *wait.AsyncActionHandler[sqlserverflex.GetInstanceResponse] { + handler := wait.New(func() (waitFinished bool, response *sqlserverflex.GetInstanceResponse, err error) { + s, err := a.GetInstanceRequestExecute(ctx, projectId, region, instanceId) + if err != nil { + return false, nil, err + } + if s == nil || s.Id == nil || *s.Id != instanceId || s.Status == nil { + return false, nil, nil + } + switch strings.ToLower(string(*s.Status)) { + case strings.ToLower(InstanceStateSuccess): + if *s.Network.AccessScope == "SNA" { + if s.Network.InstanceAddress == nil { + tflog.Info(ctx, "Waiting for instance_address") + return false, nil, nil + } + if s.Network.RouterAddress == nil { + tflog.Info(ctx, "Waiting for router_address") + return false, nil, nil + } + } + return true, s, nil + case strings.ToLower(InstanceStateUnknown), strings.ToLower(InstanceStateFailed): + return true, s, fmt.Errorf("create failed for instance with id %s", instanceId) + case strings.ToLower(InstanceStatePending), strings.ToLower(InstanceStateProcessing): + tflog.Info(ctx, "request is being handled", map[string]interface{}{ + "status": *s.Status, + }) + return false, nil, nil + default: + tflog.Info(ctx, "Wait (create) received unknown status", map[string]interface{}{ + "instanceId": instanceId, + "status": s.Status, + }) + return false, s, nil + } + }) + return handler +} + +// UpdateInstanceWaitHandler will wait for instance update +func UpdateInstanceWaitHandler(ctx context.Context, a APIClientInterface, projectId, instanceId, region string) *wait.AsyncActionHandler[sqlserverflex.GetInstanceResponse] { + handler := wait.New(func() (waitFinished bool, response *sqlserverflex.GetInstanceResponse, err error) { + s, err := a.GetInstanceRequestExecute(ctx, projectId, region, instanceId) + if err != nil { + return false, nil, err + } + if s == nil || s.Id == nil || *s.Id != instanceId || s.Status == nil { + return false, nil, nil + } + switch strings.ToLower(string(*s.Status)) { + case strings.ToLower(InstanceStateSuccess): + return true, s, nil + case strings.ToLower(InstanceStateUnknown), strings.ToLower(InstanceStateFailed): + return true, s, fmt.Errorf("update failed for instance with id %s", instanceId) + case strings.ToLower(InstanceStatePending), strings.ToLower(InstanceStateProcessing): + tflog.Info(ctx, "request is being handled", map[string]interface{}{ + "status": *s.Status, + }) + return false, nil, nil + default: + tflog.Info(ctx, "Wait (update) received unknown status", map[string]interface{}{ + "instanceId": instanceId, + "status": s.Status, + }) + return false, s, nil + } + }) + handler.SetSleepBeforeWait(15 * time.Second) + handler.SetTimeout(45 * time.Minute) + return handler +} + +// DeleteInstanceWaitHandler will wait for instance deletion +func DeleteInstanceWaitHandler(ctx context.Context, a APIClientInterface, projectId, instanceId, region string) *wait.AsyncActionHandler[struct{}] { + handler := wait.New(func() (waitFinished bool, response *struct{}, err error) { + _, err = a.GetInstanceRequestExecute(ctx, projectId, region, instanceId) + if err == nil { + return false, nil, nil + } + var oapiErr *oapierror.GenericOpenAPIError + ok := errors.As(err, &oapiErr) + if !ok { + return false, nil, fmt.Errorf("could not convert error to oapierror.GenericOpenAPIError") + } + if oapiErr.StatusCode != http.StatusNotFound { + return false, nil, err + } + return true, nil, nil + }) + handler.SetTimeout(15 * time.Minute) + return handler +} + +// CreateDatabaseWaitHandler will wait for instance creation +func CreateDatabaseWaitHandler( + ctx context.Context, + a APIClientInterface, + projectId, instanceId, region, databaseName string, +) *wait.AsyncActionHandler[sqlserverflex.GetDatabaseResponse] { + handler := wait.New(func() (waitFinished bool, response *sqlserverflex.GetDatabaseResponse, err error) { + s, err := a.GetDatabaseRequestExecute(ctx, projectId, region, instanceId, databaseName) + if err != nil { + return false, nil, err + } + if s == nil || s.Name == nil || *s.Name != databaseName { + return false, nil, nil + } + var oapiErr *oapierror.GenericOpenAPIError + ok := errors.As(err, &oapiErr) + if !ok { + return false, nil, fmt.Errorf("could not convert error to oapierror.GenericOpenAPIError") + } + if oapiErr.StatusCode != http.StatusNotFound { + return false, nil, err + } + return true, nil, nil + }) + return handler +} diff --git a/stackit/internal/wait/sqlserverflexbeta/wait_test.go b/stackit/internal/wait/sqlserverflexbeta/wait_test.go new file mode 100644 index 00000000..e1ccc9c5 --- /dev/null +++ b/stackit/internal/wait/sqlserverflexbeta/wait_test.go @@ -0,0 +1,258 @@ +package sqlserverflexbeta + +import ( + "context" + "testing" + "time" + + "github.com/google/go-cmp/cmp" + "github.com/stackitcloud/stackit-sdk-go/core/oapierror" + "github.com/stackitcloud/stackit-sdk-go/core/utils" + sqlserverflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta" +) + +// Used for testing instance operations +type apiClientInstanceMocked struct { + instanceId string + instanceState string + instanceNetwork sqlserverflex.InstanceNetwork + instanceIsDeleted bool + instanceGetFails bool +} + +func (a *apiClientInstanceMocked) GetInstanceRequestExecute(_ context.Context, _, _, _ string) (*sqlserverflex.GetInstanceResponse, error) { + if a.instanceGetFails { + return nil, &oapierror.GenericOpenAPIError{ + StatusCode: 500, + } + } + + if a.instanceIsDeleted { + return nil, &oapierror.GenericOpenAPIError{ + StatusCode: 404, + } + } + + return &sqlserverflex.GetInstanceResponse{ + Id: &a.instanceId, + Status: sqlserverflex.GetInstanceResponseGetStatusAttributeType(&a.instanceState), + Network: &a.instanceNetwork, + }, nil +} +func TestCreateInstanceWaitHandler(t *testing.T) { + t.Skip("skipping - needs refactoring") + tests := []struct { + desc string + instanceGetFails bool + instanceState string + instanceNetwork sqlserverflex.InstanceNetwork + usersGetErrorStatus int + wantErr bool + wantRes *sqlserverflex.GetInstanceResponse + }{ + { + desc: "create_succeeded", + instanceGetFails: false, + instanceState: InstanceStateSuccess, + instanceNetwork: sqlserverflex.InstanceNetwork{ + AccessScope: nil, + Acl: nil, + InstanceAddress: utils.Ptr("10.0.0.1"), + RouterAddress: utils.Ptr("10.0.0.2"), + }, + wantErr: false, + wantRes: &sqlserverflex.GetInstanceResponse{ + BackupSchedule: nil, + Edition: nil, + Encryption: nil, + FlavorId: nil, + Id: nil, + IsDeletable: nil, + Name: nil, + Network: &sqlserverflex.InstanceNetwork{ + AccessScope: nil, + Acl: nil, + InstanceAddress: utils.Ptr("10.0.0.1"), + RouterAddress: utils.Ptr("10.0.0.2"), + }, + Replicas: nil, + RetentionDays: nil, + Status: nil, + Storage: nil, + Version: nil, + }, + }, + { + desc: "create_failed", + instanceGetFails: false, + instanceState: InstanceStateFailed, + wantErr: true, + wantRes: nil, + }, + { + desc: "create_failed_2", + instanceGetFails: false, + instanceState: InstanceStateEmpty, + wantErr: true, + wantRes: nil, + }, + { + desc: "instance_get_fails", + instanceGetFails: true, + wantErr: true, + wantRes: nil, + }, + { + desc: "timeout", + instanceGetFails: false, + instanceState: InstanceStateProcessing, + wantErr: true, + wantRes: nil, + }, + } + for _, tt := range tests { + t.Run(tt.desc, func(t *testing.T) { + instanceId := "foo-bar" + + apiClient := &apiClientInstanceMocked{ + instanceId: instanceId, + instanceState: tt.instanceState, + instanceGetFails: tt.instanceGetFails, + } + + handler := CreateInstanceWaitHandler(context.Background(), apiClient, "", instanceId, "") + + gotRes, err := handler.SetTimeout(10 * time.Millisecond).SetSleepBeforeWait(1 * time.Millisecond).WaitWithContext(context.Background()) + if (err != nil) != tt.wantErr { + t.Fatalf("handler error = %v, wantErr %v", err, tt.wantErr) + } + + if !cmp.Equal(gotRes, tt.wantRes) { + t.Fatalf("handler gotRes = %v, want %v", gotRes, tt.wantRes) + } + }) + } +} + +func TestUpdateInstanceWaitHandler(t *testing.T) { + t.Skip("skipping - needs refactoring") + tests := []struct { + desc string + instanceGetFails bool + instanceState string + wantErr bool + wantResp bool + }{ + { + desc: "update_succeeded", + instanceGetFails: false, + instanceState: InstanceStateSuccess, + wantErr: false, + wantResp: true, + }, + { + desc: "update_failed", + instanceGetFails: false, + instanceState: InstanceStateFailed, + wantErr: true, + wantResp: true, + }, + { + desc: "update_failed_2", + instanceGetFails: false, + instanceState: InstanceStateEmpty, + wantErr: true, + wantResp: true, + }, + { + desc: "get_fails", + instanceGetFails: true, + wantErr: true, + wantResp: false, + }, + { + desc: "timeout", + instanceGetFails: false, + instanceState: InstanceStateProcessing, + wantErr: true, + wantResp: true, + }, + } + for _, tt := range tests { + t.Run(tt.desc, func(t *testing.T) { + instanceId := "foo-bar" + + apiClient := &apiClientInstanceMocked{ + instanceId: instanceId, + instanceState: tt.instanceState, + instanceGetFails: tt.instanceGetFails, + } + + var wantRes *sqlserverflex.GetInstanceResponse + if tt.wantResp { + wantRes = &sqlserverflex.GetInstanceResponse{ + Id: &instanceId, + Status: sqlserverflex.GetInstanceResponseGetStatusAttributeType(utils.Ptr(tt.instanceState)), + } + } + + handler := UpdateInstanceWaitHandler(context.Background(), apiClient, "", instanceId, "") + + gotRes, err := handler.SetTimeout(10 * time.Millisecond).SetSleepBeforeWait(1 * time.Millisecond).WaitWithContext(context.Background()) + + if (err != nil) != tt.wantErr { + t.Fatalf("handler error = %v, wantErr %v", err, tt.wantErr) + } + if !cmp.Equal(gotRes, wantRes) { + t.Fatalf("handler gotRes = %v, want %v", gotRes, wantRes) + } + }) + } +} + +func TestDeleteInstanceWaitHandler(t *testing.T) { + tests := []struct { + desc string + instanceGetFails bool + instanceState string + wantErr bool + }{ + { + desc: "delete_succeeded", + instanceGetFails: false, + instanceState: InstanceStateSuccess, + wantErr: false, + }, + { + desc: "delete_failed", + instanceGetFails: false, + instanceState: InstanceStateFailed, + wantErr: true, + }, + { + desc: "get_fails", + instanceGetFails: true, + wantErr: true, + }, + } + for _, tt := range tests { + t.Run(tt.desc, func(t *testing.T) { + instanceId := "foo-bar" + + apiClient := &apiClientInstanceMocked{ + instanceGetFails: tt.instanceGetFails, + instanceIsDeleted: tt.instanceState == InstanceStateSuccess, + instanceId: instanceId, + instanceState: tt.instanceState, + } + + handler := DeleteInstanceWaitHandler(context.Background(), apiClient, "", instanceId, "") + + _, err := handler.SetTimeout(10 * time.Millisecond).WaitWithContext(context.Background()) + + if (err != nil) != tt.wantErr { + t.Fatalf("handler error = %v, wantErr %v", err, tt.wantErr) + } + }) + } +} diff --git a/stackit/provider.go b/stackit/provider.go index 22ade416..f05e204c 100644 --- a/stackit/provider.go +++ b/stackit/provider.go @@ -26,11 +26,15 @@ import ( postgresflexalphaFlavors "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/flavors" postgresFlexAlphaInstance "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/instance" postgresFlexAlphaUser "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/user" + sqlserverFlexBetaFlavor "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/flavor" + sqlserverflexalphaDatabase "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/database" sqlserverFlexAlphaFlavor "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/flavor" sqlServerFlexAlphaInstance "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/instance" sqlserverFlexAlphaUser "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/user" - sqlserverflexalphaVersion "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/version" + + sqlserverflexBetaDatabase "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/database" + sqlserverflexBetaInstance "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/instance" ) // Ensure the implementation satisfies the expected interfaces @@ -502,11 +506,14 @@ func (p *Provider) DataSources(_ context.Context) []func() datasource.DataSource postgresFlexAlphaUser.NewUserDataSource, postgresflexalphaFlavors.NewFlavorsDataSource, - sqlserverflexalphaVersion.NewVersionDataSource, sqlserverFlexAlphaFlavor.NewFlavorDataSource, sqlServerFlexAlphaInstance.NewInstanceDataSource, sqlserverFlexAlphaUser.NewUserDataSource, sqlserverflexalphaDatabase.NewDatabaseDataSource, + + sqlserverflexBetaDatabase.NewDatabaseDataSource, + sqlserverflexBetaInstance.NewInstanceDataSource, + sqlserverFlexBetaFlavor.NewFlavorDataSource, } } @@ -516,9 +523,13 @@ func (p *Provider) Resources(_ context.Context) []func() resource.Resource { postgresFlexAlphaDatabase.NewDatabaseResource, postgresFlexAlphaInstance.NewInstanceResource, postgresFlexAlphaUser.NewUserResource, + sqlServerFlexAlphaInstance.NewInstanceResource, sqlserverFlexAlphaUser.NewUserResource, sqlserverflexalphaDatabase.NewDatabaseResource, + + sqlserverflexBetaInstance.NewInstanceResource, + sqlserverflexBetaDatabase.NewDatabaseResource, } return resources } diff --git a/tools/tools.go b/tools/tools.go index 7023ef96..075c26d5 100644 --- a/tools/tools.go +++ b/tools/tools.go @@ -1,9 +1,5 @@ package tools -// Generate copyright headers -// nolint:misspell // copywrite is correct here -//go:generate go run github.com/hashicorp/copywrite headers -d .. --config ../.copywrite.hcl - // Format Terraform code for use in documentation. // If you do not have Terraform installed, you can remove the formatting command, but it is suggested // to ensure the documentation is formatted properly. From c22e758b2c02ce0cd4b812a1ab800ae1ad9d9f0e Mon Sep 17 00:00:00 2001 From: "Marcel S. Henselin" Date: Thu, 5 Feb 2026 15:11:41 +0000 Subject: [PATCH 02/41] fix: sqlserver_beta (#33) ## Description relates to #1234 ## Checklist - [ ] Issue was linked above - [ ] Code format was applied: `make fmt` - [ ] Examples were added / adjusted (see `examples/` directory) - [x] Docs are up-to-date: `make generate-docs` (will be checked by CI) - [ ] Unit tests got implemented or updated - [ ] Acceptance tests got implemented or updated (see e.g. [here](https://github.com/stackitcloud/terraform-provider-stackit/blob/f5f99d170996b208672ae684b6da53420e369563/stackit/internal/services/dns/dns_acc_test.go)) - [x] Unit tests are passing: `make test` (will be checked by CI) - [x] No linter issues: `make lint` (will be checked by CI) Reviewed-on: https://tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pulls/33 Reviewed-by: Andre_Harms Co-authored-by: Marcel S. Henselin Co-committed-by: Marcel S. Henselin --- cmd/cmd/build/build.go | 58 ++++++- .../templates/data_source_scaffold.gotmpl | 67 +++++--- cmd/cmd/getFieldsCmd.go | 147 ++++++++++++++++++ cmd/main.go | 1 + .../sqlserverflexbeta_database.md | 6 +- docs/data-sources/sqlserverflexbeta_flavor.md | 13 +- .../sqlserverflex/beta/database_config.yml | 11 +- ...ion_config.yml.bak => versions_config.yml} | 0 .../sqlserverflexalpha/user/resource.go | 10 +- .../sqlserverflexbeta/database/datasource.go | 39 +++-- ...rce_gen.go => database_data_source_fix.go} | 7 +- .../sqlserverflexbeta/database/resource.go | 115 +++++++++----- .../sqlserverflexbeta/flavor/datasource.go | 46 ++++-- .../internal/wait/sqlserverflexalpha/wait.go | 38 +++++ 14 files changed, 456 insertions(+), 102 deletions(-) create mode 100644 cmd/cmd/getFieldsCmd.go rename service_specs/sqlserverflex/beta/{version_config.yml.bak => versions_config.yml} (100%) rename stackit/internal/services/sqlserverflexbeta/database/datasources_gen/{database_data_source_gen.go => database_data_source_fix.go} (95%) diff --git a/cmd/cmd/build/build.go b/cmd/cmd/build/build.go index 3351d5da..a6e0acf9 100644 --- a/cmd/cmd/build/build.go +++ b/cmd/cmd/build/build.go @@ -4,6 +4,9 @@ import ( "bytes" "errors" "fmt" + "go/ast" + "go/parser" + "go/token" "io" "log" "log/slog" @@ -260,6 +263,7 @@ type templateData struct { NameCamel string NamePascal string NameSnake string + Fields []string } func fileExists(path string) bool { @@ -317,11 +321,16 @@ func createBoilerplate(rootFolder, folder string) error { return errors.New("resource name is invalid") } + fields, tokenErr := getTokens(dsFile) + if tokenErr != nil { + return fmt.Errorf("error reading tokens: %w", tokenErr) + } + tplName := "data_source_scaffold.gotmpl" err = writeTemplateToFile( tplName, path.Join(rootFolder, "cmd", "cmd", "build", "templates", tplName), - path.Join(folder, svc.Name(), res.Name(), "datasource.go"), + dsGoFile, &templateData{ PackageName: svc.Name(), PackageNameCamel: ToCamelCase(svc.Name()), @@ -329,6 +338,7 @@ func createBoilerplate(rootFolder, folder string) error { NameCamel: ToCamelCase(resourceName), NamePascal: ToPascalCase(resourceName), NameSnake: resourceName, + Fields: fields, }, ) if err != nil { @@ -342,11 +352,16 @@ func createBoilerplate(rootFolder, folder string) error { return errors.New("resource name is invalid") } + fields, tokenErr := getTokens(resFile) + if tokenErr != nil { + return fmt.Errorf("error reading tokens: %w", tokenErr) + } + tplName := "resource_scaffold.gotmpl" err = writeTemplateToFile( tplName, path.Join(rootFolder, "cmd", "cmd", "build", "templates", tplName), - path.Join(folder, svc.Name(), res.Name(), "resource.go"), + resGoFile, &templateData{ PackageName: svc.Name(), PackageNameCamel: ToCamelCase(svc.Name()), @@ -354,6 +369,7 @@ func createBoilerplate(rootFolder, folder string) error { NameCamel: ToCamelCase(resourceName), NamePascal: ToPascalCase(resourceName), NameSnake: resourceName, + Fields: fields, }, ) if err != nil { @@ -813,3 +829,41 @@ func getRoot() (*string, error) { lines := strings.Split(string(out), "\n") return &lines[0], nil } + +func getTokens(fileName string) ([]string, error) { + fset := token.NewFileSet() + + var result []string + + node, err := parser.ParseFile(fset, fileName, nil, parser.ParseComments) + if err != nil { + return nil, err + } + + ast.Inspect(node, func(n ast.Node) bool { + // Suche nach Typ-Deklarationen (structs) + ts, ok := n.(*ast.TypeSpec) + if ok { + if strings.Contains(ts.Name.Name, "Model") { + // fmt.Printf("found model: %s\n", ts.Name.Name) + ast.Inspect(ts, func(sn ast.Node) bool { + tts, tok := sn.(*ast.Field) + if tok { + // fmt.Printf(" found: %+v\n", tts.Names[0]) + // spew.Dump(tts.Type) + + result = append(result, tts.Names[0].String()) + + //fld, fldOk := tts.Type.(*ast.Ident) + //if fldOk { + // fmt.Printf("type: %+v\n", fld) + //} + } + return true + }) + } + } + return true + }) + return result, nil +} diff --git a/cmd/cmd/build/templates/data_source_scaffold.gotmpl b/cmd/cmd/build/templates/data_source_scaffold.gotmpl index 74fc0f91..ba4e8095 100644 --- a/cmd/cmd/build/templates/data_source_scaffold.gotmpl +++ b/cmd/cmd/build/templates/data_source_scaffold.gotmpl @@ -6,15 +6,16 @@ import ( "net/http" "github.com/hashicorp/terraform-plugin-framework/datasource" + "github.com/hashicorp/terraform-plugin-framework/datasource/schema" + "github.com/hashicorp/terraform-plugin-framework/types" "github.com/hashicorp/terraform-plugin-log/tflog" + "github.com/stackitcloud/stackit-sdk-go/core/config" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" {{.PackageName}}Pkg "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/{{.PackageName}}" - {{.PackageName}}Utils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/{{.PackageName}}/utils" - {{.PackageName}}Gen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/{{.PackageName}}/{{.NameSnake}}/datasources_gen" ) @@ -26,6 +27,11 @@ func New{{.NamePascal}}DataSource() datasource.DataSource { return &{{.NameCamel}}DataSource{} } +type dsModel struct { + {{.PackageName}}Gen.{{.NamePascal}}Model + TfId types.String `tfsdk:"id"` +} + type {{.NameCamel}}DataSource struct{ client *{{.PackageName}}Pkg.APIClient providerData core.ProviderData @@ -37,6 +43,11 @@ func (d *{{.NameCamel}}DataSource) Metadata(_ context.Context, req datasource.Me func (d *{{.NameCamel}}DataSource) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) { resp.Schema = {{.PackageName}}Gen.{{.NamePascal}}DataSourceSchema(ctx) + resp.Schema.Attributes["id"] = schema.StringAttribute{ + Computed: true, + Description: "The terraform internal identifier.", + MarkdownDescription: "The terraform internal identifier.", + } } // Configure adds the provider configured client to the data source. @@ -51,8 +62,30 @@ func (d *{{.NameCamel}}DataSource) Configure( return } - apiClient := {{.PackageName}}Utils.ConfigureClient(ctx, &d.providerData, &resp.Diagnostics) - if resp.Diagnostics.HasError() { + apiClientConfigOptions := []config.ConfigurationOption{ + config.WithCustomAuth(d.providerData.RoundTripper), + utils.UserAgentConfigOption(d.providerData.Version), + } + if d.providerData.{{.PackageNamePascal}}CustomEndpoint != "" { + apiClientConfigOptions = append( + apiClientConfigOptions, + config.WithEndpoint(d.providerData.{{.PackageNamePascal}}CustomEndpoint), + ) + } else { + apiClientConfigOptions = append( + apiClientConfigOptions, + config.WithRegion(d.providerData.GetRegion()), + ) + } + apiClient, err := {{.PackageName}}Pkg.NewAPIClient(apiClientConfigOptions...) + if err != nil { + resp.Diagnostics.AddError( + "Error configuring API client", + fmt.Sprintf( + "Configuring client: %v. This is an error related to the provider configuration, not to the resource configuration", + err, + ), + ) return } d.client = apiClient @@ -60,7 +93,7 @@ func (d *{{.NameCamel}}DataSource) Configure( } func (d *{{.NameCamel}}DataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { - var data {{.PackageName}}Gen.{{.NamePascal}}Model + var data dsModel // Read Terraform configuration data into the model resp.Diagnostics.Append(req.Config.Get(ctx, &data)...) @@ -77,8 +110,11 @@ func (d *{{.NameCamel}}DataSource) Read(ctx context.Context, req datasource.Read ctx = tflog.SetField(ctx, "project_id", projectId) ctx = tflog.SetField(ctx, "region", region) + + // TODO: implement needed fields ctx = tflog.SetField(ctx, "{{.NameCamel}}_id", {{.NameCamel}}Id) + // TODO: refactor to correct implementation {{.NameCamel}}Resp, err := d.client.Get{{.NamePascal}}Request(ctx, projectId, region, {{.NameCamel}}Id).Execute() if err != nil { utils.LogError( @@ -98,22 +134,15 @@ func (d *{{.NameCamel}}DataSource) Read(ctx context.Context, req datasource.Read ctx = core.LogResponse(ctx) - // Todo: Read API call logic + data.TfId = utils.BuildInternalTerraformId(projectId, region, ..) - // Example data value setting - // data.Id = types.StringValue("example-id") - - err = mapResponseToModel(ctx, {{.NameCamel}}Resp, &data, resp.Diagnostics) - if err != nil { - core.LogAndAddError( - ctx, - &resp.Diagnostics, - fmt.Sprintf("%s Read", errorPrefix), - fmt.Sprintf("Processing API payload: %v", err), - ) - return - } + // TODO: fill remaining fields +{{- range .Fields }} + // data.{{.}} = types.Sometype(apiResponse.Get{{.}}()) +{{- end -}} // Save data into Terraform state resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) + + tflog.Info(ctx, fmt.Sprintf("%s read successful", errorPrefix)) } diff --git a/cmd/cmd/getFieldsCmd.go b/cmd/cmd/getFieldsCmd.go new file mode 100644 index 00000000..f24db87d --- /dev/null +++ b/cmd/cmd/getFieldsCmd.go @@ -0,0 +1,147 @@ +package cmd + +import ( + "fmt" + "go/ast" + "go/parser" + "go/token" + "path" + "path/filepath" + "strings" + + "github.com/spf13/cobra" +) + +var ( + inFile string + svcName string + resName string + resType string + filePath string +) + +var getFieldsCmd = &cobra.Command{ + Use: "get-fields", + Short: "get fields from file", + Long: `...`, + PreRunE: func(cmd *cobra.Command, args []string) error { + typeStr := "data_source" + if resType != "resource" && resType != "datasource" { + return fmt.Errorf("--type can only be resource or datasource") + } + + if resType == "resource" { + typeStr = resType + } + + if inFile == "" && svcName == "" && resName == "" { + return fmt.Errorf("--infile or --service and --resource must be provided") + } + + if inFile != "" { + if svcName != "" || resName != "" { + return fmt.Errorf("--infile is provided and excludes --service and --resource") + } + p, err := filepath.Abs(inFile) + if err != nil { + return err + } + filePath = p + return nil + } + + if svcName != "" && resName == "" { + return fmt.Errorf("if --service is provided, you MUST also provide --resource") + } + + if svcName == "" && resName != "" { + return fmt.Errorf("if --resource is provided, you MUST also provide --service") + } + + p, err := filepath.Abs( + path.Join( + "stackit", + "internal", + "services", + svcName, + resName, + fmt.Sprintf("%ss_gen", resType), + fmt.Sprintf("%s_%s_gen.go", resName, typeStr), + ), + ) + if err != nil { + return err + } + filePath = p + + //// Enum check + //switch format { + //case "json", "yaml": + //default: + // return fmt.Errorf("invalid --format: %s (want json|yaml)", format) + //} + return nil + }, + RunE: func(cmd *cobra.Command, args []string) error { + return getFields(filePath) + }, +} + +func getFields(f string) error { + tokens, err := getTokens(f) + if err != nil { + return err + } + for _, item := range tokens { + fmt.Printf("%s \n", item) + } + return nil +} + +func getTokens(fileName string) ([]string, error) { + fset := token.NewFileSet() + var result []string + + node, err := parser.ParseFile(fset, fileName, nil, parser.ParseComments) + if err != nil { + return nil, err + } + + ast.Inspect(node, func(n ast.Node) bool { + // Suche nach Typ-Deklarationen (structs) + ts, ok := n.(*ast.TypeSpec) + if ok { + if strings.Contains(ts.Name.Name, "Model") { + // fmt.Printf("found model: %s\n", ts.Name.Name) + ast.Inspect(ts, func(sn ast.Node) bool { + tts, tok := sn.(*ast.Field) + if tok { + // fmt.Printf(" found: %+v\n", tts.Names[0]) + // spew.Dump(tts.Type) + + result = append(result, tts.Names[0].String()) + + //fld, fldOk := tts.Type.(*ast.Ident) + //if fldOk { + // fmt.Printf("type: %+v\n", fld) + //} + } + return true + }) + } + } + return true + }) + return result, nil +} + +func NewGetFieldsCmd() *cobra.Command { + return getFieldsCmd +} + +func init() { // nolint: gochecknoinits + getFieldsCmd.Flags().StringVarP(&inFile, "infile", "i", "", "input filename incl path") + getFieldsCmd.Flags().StringVarP(&svcName, "service", "s", "", "service name") + getFieldsCmd.Flags().StringVarP(&resName, "resource", "r", "", "resource name") + getFieldsCmd.Flags().StringVarP(&resType, "type", "t", "resource", "resource type (data-source or resource [default])") +} diff --git a/cmd/main.go b/cmd/main.go index 52753a18..1b80b034 100644 --- a/cmd/main.go +++ b/cmd/main.go @@ -28,6 +28,7 @@ func main() { rootCmd.AddCommand( cmd.NewBuildCmd(), cmd.NewPublishCmd(), + cmd.NewGetFieldsCmd(), ) err := rootCmd.Execute() diff --git a/docs/data-sources/sqlserverflexbeta_database.md b/docs/data-sources/sqlserverflexbeta_database.md index 98a29d9f..9322049f 100644 --- a/docs/data-sources/sqlserverflexbeta_database.md +++ b/docs/data-sources/sqlserverflexbeta_database.md @@ -28,15 +28,13 @@ data "stackitprivatepreview_sqlserverflexbeta_database" "example" { - `database_name` (String) The name of the database. - `instance_id` (String) The ID of the instance. - `project_id` (String) The STACKIT project ID. - -### Optional - - `region` (String) The region which should be addressed ### Read-Only - `collation_name` (String) The collation of the database. This database collation should match the *collation_name* of one of the collations given by the **Get database collation list** endpoint. - `compatibility_level` (Number) CompatibilityLevel of the Database. -- `id` (Number) The id of the database. +- `id` (String) The terraform internal identifier. - `name` (String) The name of the database. - `owner` (String) The owner of the database. +- `tf_original_api_id` (Number) The id of the database. diff --git a/docs/data-sources/sqlserverflexbeta_flavor.md b/docs/data-sources/sqlserverflexbeta_flavor.md index 6c1569be..4d2a32f3 100644 --- a/docs/data-sources/sqlserverflexbeta_flavor.md +++ b/docs/data-sources/sqlserverflexbeta_flavor.md @@ -26,15 +26,22 @@ data "stackitprivatepreview_sqlserverflexbeta_flavor" "flavor" { ## Schema -### Read-Only +### Required - `cpu` (Number) The cpu count of the instance. +- `node_type` (String) defines the nodeType it can be either single or HA +- `project_id` (String) The project ID of the flavor. +- `ram` (Number) The memory of the instance in Gibibyte. +- `region` (String) The region of the flavor. +- `storage_class` (String) The memory of the instance in Gibibyte. + +### Read-Only + - `description` (String) The flavor description. +- `flavor_id` (String) The id of the instance flavor. - `id` (String) The id of the instance flavor. - `max_gb` (Number) maximum storage which can be ordered for the flavor in Gigabyte. -- `memory` (Number) The memory of the instance in Gibibyte. - `min_gb` (Number) minimum storage which is required to order in Gigabyte. -- `node_type` (String) defines the nodeType it can be either single or HA - `storage_classes` (Attributes List) maximum storage which can be ordered for the flavor in Gigabyte. (see [below for nested schema](#nestedatt--storage_classes)) diff --git a/service_specs/sqlserverflex/beta/database_config.yml b/service_specs/sqlserverflex/beta/database_config.yml index d886fc20..135010d2 100644 --- a/service_specs/sqlserverflex/beta/database_config.yml +++ b/service_specs/sqlserverflex/beta/database_config.yml @@ -1,4 +1,3 @@ - provider: name: stackitprivatepreview @@ -7,7 +6,7 @@ resources: schema: attributes: aliases: - id: databaseId + databaseId: id create: path: /v3beta1/projects/{projectId}/regions/{region}/instances/{instanceId}/databases method: POST @@ -18,7 +17,6 @@ resources: path: /v3beta1/projects/{projectId}/regions/{region}/instances/{instanceId}/databases/{databaseName} method: DELETE - data_sources: databases: read: @@ -26,9 +24,10 @@ data_sources: method: GET database: - attributes: - aliases: - id: database_id + schema: + attributes: + aliases: + databaseId: id read: path: /v3beta1/projects/{projectId}/regions/{region}/instances/{instanceId}/databases/{databaseName} method: GET diff --git a/service_specs/sqlserverflex/beta/version_config.yml.bak b/service_specs/sqlserverflex/beta/versions_config.yml similarity index 100% rename from service_specs/sqlserverflex/beta/version_config.yml.bak rename to service_specs/sqlserverflex/beta/versions_config.yml diff --git a/stackit/internal/services/sqlserverflexalpha/user/resource.go b/stackit/internal/services/sqlserverflexalpha/user/resource.go index 2d3978c4..c5cea986 100644 --- a/stackit/internal/services/sqlserverflexalpha/user/resource.go +++ b/stackit/internal/services/sqlserverflexalpha/user/resource.go @@ -11,6 +11,7 @@ import ( "github.com/hashicorp/terraform-plugin-framework/resource/schema/int64planmodifier" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha" sqlserverflexalphaUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/utils" + sqlserverflexalphaWait "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/wait/sqlserverflexalpha" "github.com/hashicorp/terraform-plugin-framework/schema/validator" "github.com/hashicorp/terraform-plugin-log/tflog" @@ -405,9 +406,14 @@ func (r *userResource) Delete( ctx = tflog.SetField(ctx, "region", region) // Delete existing record set - err := r.client.DeleteUserRequest(ctx, projectId, region, instanceId, userId).Execute() + // err := r.client.DeleteUserRequest(ctx, projectId, region, instanceId, userId).Execute() + + // Delete existing record set + _, err := sqlserverflexalphaWait.DeleteUserWaitHandler(ctx, r.client, projectId, region, instanceId, userId). + WaitWithContext(ctx) + //err := r.client.DeleteUserRequest(ctx, arg.projectId, arg.region, arg.instanceId, userId).Execute() if err != nil { - core.LogAndAddError(ctx, &resp.Diagnostics, "Error deleting user", fmt.Sprintf("Calling API: %v", err)) + core.LogAndAddError(ctx, &resp.Diagnostics, "User Delete Error", fmt.Sprintf("Calling API: %v", err)) return } diff --git a/stackit/internal/services/sqlserverflexbeta/database/datasource.go b/stackit/internal/services/sqlserverflexbeta/database/datasource.go index 70fbaca4..4cc56ea2 100644 --- a/stackit/internal/services/sqlserverflexbeta/database/datasource.go +++ b/stackit/internal/services/sqlserverflexbeta/database/datasource.go @@ -6,6 +6,7 @@ import ( "net/http" "github.com/hashicorp/terraform-plugin-framework/datasource" + "github.com/hashicorp/terraform-plugin-framework/datasource/schema" "github.com/hashicorp/terraform-plugin-framework/types" "github.com/hashicorp/terraform-plugin-log/tflog" "github.com/stackitcloud/stackit-sdk-go/core/config" @@ -30,12 +31,22 @@ type databaseDataSource struct { providerData core.ProviderData } +type dsModel struct { + sqlserverflexbetaGen.DatabaseModel + TfId types.String `tfsdk:"id"` +} + func (d *databaseDataSource) Metadata(_ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) { resp.TypeName = req.ProviderTypeName + "_sqlserverflexbeta_database" } func (d *databaseDataSource) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) { resp.Schema = sqlserverflexbetaGen.DatabaseDataSourceSchema(ctx) + resp.Schema.Attributes["id"] = schema.StringAttribute{ + Computed: true, + Description: "The terraform internal identifier.", + MarkdownDescription: "The terraform internal identifier.", + } } // Configure adds the provider configured client to the data source. @@ -81,7 +92,7 @@ func (d *databaseDataSource) Configure( } func (d *databaseDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { - var data sqlserverflexbetaGen.DatabaseModel + var data dsModel readErr := "Read DB error" // Read Terraform configuration data into the model @@ -131,19 +142,21 @@ func (d *databaseDataSource) Read(ctx context.Context, req datasource.ReadReques ) return } - data.Id = types.Int64Value(dbId) - owner, ok := databaseResp.GetOwnerOk() - if !ok { - core.LogAndAddError( - ctx, - &resp.Diagnostics, - readErr, - "Database creation waiting: returned owner is nil", - ) - return - } - data.Owner = types.StringValue(owner) + data.Id = types.Int64Value(dbId) + data.TfId = utils.BuildInternalTerraformId(projectId, region, instanceId, databaseName) + data.Owner = types.StringValue(databaseResp.GetOwner()) + + // TODO: fill remaining fields + // data.CollationName = types.Sometype(apiResponse.GetCollationName()) + // data.CompatibilityLevel = types.Sometype(apiResponse.GetCompatibilityLevel()) + // data.DatabaseName = types.Sometype(apiResponse.GetDatabaseName()) + // data.Id = types.Sometype(apiResponse.GetId()) + // data.InstanceId = types.Sometype(apiResponse.GetInstanceId()) + // data.Name = types.Sometype(apiResponse.GetName()) + // data.Owner = types.Sometype(apiResponse.GetOwner()) + // data.ProjectId = types.Sometype(apiResponse.GetProjectId()) + // data.Region = types.Sometype(apiResponse.GetRegion()) // Save data into Terraform state resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) diff --git a/stackit/internal/services/sqlserverflexbeta/database/datasources_gen/database_data_source_gen.go b/stackit/internal/services/sqlserverflexbeta/database/datasources_gen/database_data_source_fix.go similarity index 95% rename from stackit/internal/services/sqlserverflexbeta/database/datasources_gen/database_data_source_gen.go rename to stackit/internal/services/sqlserverflexbeta/database/datasources_gen/database_data_source_fix.go index cfdc1a86..92b1064e 100644 --- a/stackit/internal/services/sqlserverflexbeta/database/datasources_gen/database_data_source_gen.go +++ b/stackit/internal/services/sqlserverflexbeta/database/datasources_gen/database_data_source_fix.go @@ -29,7 +29,7 @@ func DatabaseDataSourceSchema(ctx context.Context) schema.Schema { Description: "The name of the database.", MarkdownDescription: "The name of the database.", }, - "id": schema.Int64Attribute{ + "tf_original_api_id": schema.Int64Attribute{ Computed: true, Description: "The id of the database.", MarkdownDescription: "The id of the database.", @@ -55,8 +55,7 @@ func DatabaseDataSourceSchema(ctx context.Context) schema.Schema { MarkdownDescription: "The STACKIT project ID.", }, "region": schema.StringAttribute{ - Optional: true, - Computed: true, + Required: true, Description: "The region which should be addressed", MarkdownDescription: "The region which should be addressed", Validators: []validator.String{ @@ -73,7 +72,7 @@ type DatabaseModel struct { CollationName types.String `tfsdk:"collation_name"` CompatibilityLevel types.Int64 `tfsdk:"compatibility_level"` DatabaseName types.String `tfsdk:"database_name"` - Id types.Int64 `tfsdk:"id"` + Id types.Int64 `tfsdk:"tf_original_api_id"` InstanceId types.String `tfsdk:"instance_id"` Name types.String `tfsdk:"name"` Owner types.String `tfsdk:"owner"` diff --git a/stackit/internal/services/sqlserverflexbeta/database/resource.go b/stackit/internal/services/sqlserverflexbeta/database/resource.go index b28f5ea0..5ae1d6c4 100644 --- a/stackit/internal/services/sqlserverflexbeta/database/resource.go +++ b/stackit/internal/services/sqlserverflexbeta/database/resource.go @@ -244,6 +244,7 @@ func (r *databaseResource) Create(ctx context.Context, req resource.CreateReques func (r *databaseResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) { var data sqlserverflexbetaResGen.DatabaseModel + readErr := "[Database Read]" // Read Terraform prior state data into the model resp.Diagnostics.Append(req.State.Get(ctx, &data)...) @@ -265,22 +266,34 @@ func (r *databaseResource) Read(ctx context.Context, req resource.ReadRequest, r ctx = tflog.SetField(ctx, "project_id", projectId) ctx = tflog.SetField(ctx, "region", region) - // Todo: Read API call logic + instanceId := identityData.InstanceID.ValueString() + ctx = tflog.SetField(ctx, "instance_id", instanceId) - // Save updated data into Terraform state - resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) + dbName := identityData.DatabaseName.ValueString() + ctx = tflog.SetField(ctx, "database_name", dbName) - // TODO: Set data returned by API in identity - identity := DatabaseResourceIdentityModel{ - ProjectID: types.StringValue(projectId), - Region: types.StringValue(region), - // InstanceID: types.StringValue(instanceId), - } - resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...) - if resp.Diagnostics.HasError() { + getResp, err := r.client.GetDatabaseRequest(ctx, projectId, region, instanceId, dbName).Execute() + if err != nil { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + readErr, + fmt.Sprintf("Calling API: %v", err), + ) return } + ctx = core.LogResponse(ctx) + + data.Id = types.Int64Value(getResp.GetId()) + data.Owner = types.StringValue(getResp.GetOwner()) + data.Name = types.StringValue(getResp.GetName()) + data.DatabaseName = types.StringValue(getResp.GetName()) + data.CollationName = types.StringValue(getResp.GetCollationName()) + data.CompatibilityLevel = types.Int64Value(getResp.GetCompatibilityLevel()) + + // Save updated data into Terraform state + resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) tflog.Info(ctx, "sqlserverflexbeta.Database read") } @@ -350,11 +363,13 @@ func (r *databaseResource) ModifyPlan( req resource.ModifyPlanRequest, resp *resource.ModifyPlanResponse, ) { // nolint:gocritic // function signature required by Terraform - var configModel sqlserverflexbetaResGen.DatabaseModel + // skip initial empty configuration to avoid follow-up errors if req.Config.Raw.IsNull() { return } + + var configModel sqlserverflexbetaResGen.DatabaseModel resp.Diagnostics.Append(req.Config.Get(ctx, &configModel)...) if resp.Diagnostics.HasError() { return @@ -374,10 +389,14 @@ func (r *databaseResource) ModifyPlan( var identityModel DatabaseResourceIdentityModel identityModel.ProjectID = planModel.ProjectId identityModel.Region = planModel.Region - // TODO: complete - //if !planModel.InstanceId.IsNull() && !planModel.InstanceId.IsUnknown() { - // identityModel.InstanceID = planModel.InstanceId - //} + + if !planModel.InstanceId.IsNull() && !planModel.InstanceId.IsUnknown() { + identityModel.InstanceID = planModel.InstanceId + } + + if !planModel.Name.IsNull() && !planModel.Name.IsUnknown() { + identityModel.DatabaseName = planModel.Name + } resp.Diagnostics.Append(resp.Identity.Set(ctx, identityModel)...) if resp.Diagnostics.HasError() { @@ -397,30 +416,54 @@ func (r *databaseResource) ImportState( req resource.ImportStateRequest, resp *resource.ImportStateResponse, ) { - idParts := strings.Split(req.ID, core.Separator) + ctx = core.InitProviderContext(ctx) - // Todo: Import logic - if len(idParts) < 2 || idParts[0] == "" || idParts[1] == "" { - core.LogAndAddError( - ctx, &resp.Diagnostics, - "Error importing database", - fmt.Sprintf( - "Expected import identifier with format [project_id],[region],..., got %q", - req.ID, - ), - ) + if req.ID != "" { + idParts := strings.Split(req.ID, core.Separator) + + if len(idParts) != 4 || idParts[0] == "" || idParts[1] == "" || idParts[2] == "" || idParts[3] == "" { + core.LogAndAddError(ctx, &resp.Diagnostics, + "Error importing database", + fmt.Sprintf("Expected import identifier with format: [project_id],[region],[instance_id],[database_name] Got: %q", req.ID), + ) + return + } + + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), idParts[0])...) + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), idParts[1])...) + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), idParts[2])...) + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("database_name"), idParts[3])...) + + var identityData DatabaseResourceIdentityModel + identityData.ProjectID = types.StringValue(idParts[0]) + identityData.Region = types.StringValue(idParts[1]) + identityData.InstanceID = types.StringValue(idParts[2]) + identityData.DatabaseName = types.StringValue(idParts[3]) + + resp.Diagnostics.Append(resp.Identity.Set(ctx, &identityData)...) + if resp.Diagnostics.HasError() { + return + } + + tflog.Info(ctx, "Sqlserverflexbeta database state imported") return } - resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), idParts[0])...) - resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), idParts[1])...) - // ... more ... + var identityData DatabaseResourceIdentityModel + resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...) + if resp.Diagnostics.HasError() { + return + } + + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), identityData.ProjectID.ValueString())...) + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), identityData.Region.ValueString())...) + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), identityData.InstanceID.ValueString())...) + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("database_name"), identityData.DatabaseName.ValueString())...) + + resp.Diagnostics.Append(resp.Identity.Set(ctx, &identityData)...) + if resp.Diagnostics.HasError() { + return + } - core.LogAndAddWarning( - ctx, - &resp.Diagnostics, - "Sqlserverflexbeta database imported with empty password", - "The database password is not imported as it is only available upon creation of a new database. The password field will be empty.", - ) tflog.Info(ctx, "Sqlserverflexbeta database state imported") } diff --git a/stackit/internal/services/sqlserverflexbeta/flavor/datasource.go b/stackit/internal/services/sqlserverflexbeta/flavor/datasource.go index 55f12a9a..59f3982c 100644 --- a/stackit/internal/services/sqlserverflexbeta/flavor/datasource.go +++ b/stackit/internal/services/sqlserverflexbeta/flavor/datasource.go @@ -92,17 +92,47 @@ func (r *flavorDataSource) Configure(ctx context.Context, req datasource.Configu return } r.client = apiClient - tflog.Info(ctx, "Postgres Flex instance client configured") + tflog.Info(ctx, "SQL Server Flex instance client configured") } func (r *flavorDataSource) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) { resp.Schema = schema.Schema{ Attributes: map[string]schema.Attribute{ + "project_id": schema.StringAttribute{ + Required: true, + Description: "The project ID of the flavor.", + MarkdownDescription: "The project ID of the flavor.", + }, + "region": schema.StringAttribute{ + Required: true, + Description: "The region of the flavor.", + MarkdownDescription: "The region of the flavor.", + }, "cpu": schema.Int64Attribute{ - Computed: true, + Required: true, Description: "The cpu count of the instance.", MarkdownDescription: "The cpu count of the instance.", }, + "ram": schema.Int64Attribute{ + Required: true, + Description: "The memory of the instance in Gibibyte.", + MarkdownDescription: "The memory of the instance in Gibibyte.", + }, + "storage_class": schema.StringAttribute{ + Required: true, + Description: "The memory of the instance in Gibibyte.", + MarkdownDescription: "The memory of the instance in Gibibyte.", + }, + "node_type": schema.StringAttribute{ + Required: true, + Description: "defines the nodeType it can be either single or HA", + MarkdownDescription: "defines the nodeType it can be either single or HA", + }, + "flavor_id": schema.StringAttribute{ + Computed: true, + Description: "The id of the instance flavor.", + MarkdownDescription: "The id of the instance flavor.", + }, "description": schema.StringAttribute{ Computed: true, Description: "The flavor description.", @@ -118,21 +148,11 @@ func (r *flavorDataSource) Schema(ctx context.Context, _ datasource.SchemaReques Description: "maximum storage which can be ordered for the flavor in Gigabyte.", MarkdownDescription: "maximum storage which can be ordered for the flavor in Gigabyte.", }, - "memory": schema.Int64Attribute{ - Computed: true, - Description: "The memory of the instance in Gibibyte.", - MarkdownDescription: "The memory of the instance in Gibibyte.", - }, "min_gb": schema.Int64Attribute{ Computed: true, Description: "minimum storage which is required to order in Gigabyte.", MarkdownDescription: "minimum storage which is required to order in Gigabyte.", }, - "node_type": schema.StringAttribute{ - Computed: true, - Description: "defines the nodeType it can be either single or HA", - MarkdownDescription: "defines the nodeType it can be either single or HA", - }, "storage_classes": schema.ListNestedAttribute{ NestedObject: schema.NestedAttributeObject{ Attributes: map[string]schema.Attribute{ @@ -331,5 +351,5 @@ func (r *flavorDataSource) Read(ctx context.Context, req datasource.ReadRequest, if resp.Diagnostics.HasError() { return } - tflog.Info(ctx, "Postgres Flex flavors read") + tflog.Info(ctx, "SQL Server Flex flavors read") } diff --git a/stackit/internal/wait/sqlserverflexalpha/wait.go b/stackit/internal/wait/sqlserverflexalpha/wait.go index 21bb8f70..542b06cb 100644 --- a/stackit/internal/wait/sqlserverflexalpha/wait.go +++ b/stackit/internal/wait/sqlserverflexalpha/wait.go @@ -32,6 +32,11 @@ type APIClientInstanceInterface interface { GetInstanceRequestExecute(ctx context.Context, projectId, region, instanceId string) (*sqlserverflex.GetInstanceResponse, error) } +// APIClientUserInterface Interface needed for tests +type APIClientUserInterface interface { + DeleteUserRequestExecute(ctx context.Context, projectId string, region string, instanceId string, userId int64) error +} + // CreateInstanceWaitHandler will wait for instance creation func CreateInstanceWaitHandler(ctx context.Context, a APIClientInstanceInterface, projectId, instanceId, region string) *wait.AsyncActionHandler[sqlserverflex.GetInstanceResponse] { handler := wait.New(func() (waitFinished bool, response *sqlserverflex.GetInstanceResponse, err error) { @@ -131,3 +136,36 @@ func DeleteInstanceWaitHandler(ctx context.Context, a APIClientInstanceInterface handler.SetTimeout(15 * time.Minute) return handler } + +// DeleteUserWaitHandler will wait for instance deletion +func DeleteUserWaitHandler( + ctx context.Context, + a APIClientUserInterface, + projectId, instanceId, region string, + userId int64, +) *wait.AsyncActionHandler[struct{}] { + handler := wait.New(func() (waitFinished bool, response *struct{}, err error) { + err = a.DeleteUserRequestExecute(ctx, projectId, region, instanceId, userId) + if err == nil { + return false, nil, nil + } + var oapiErr *oapierror.GenericOpenAPIError + ok := errors.As(err, &oapiErr) + if !ok { + return false, nil, fmt.Errorf("could not convert error to oapierror.GenericOpenAPIError") + } + + switch oapiErr.StatusCode { + case http.StatusNotFound: + return true, nil, nil + case http.StatusInternalServerError: + tflog.Warn(ctx, "Wait handler got error 500") + return false, nil, nil + default: + return false, nil, err + } + }) + handler.SetTimeout(15 * time.Minute) + handler.SetSleepBeforeWait(15 * time.Second) + return handler +} From 32e41d8b4433b8fec57e6c2bbbebe1cda143febe Mon Sep 17 00:00:00 2001 From: "Marcel S. Henselin" Date: Fri, 6 Feb 2026 09:57:51 +0000 Subject: [PATCH 03/41] feat: testing (#34) ## Description relates to #1234 ## Checklist - [ ] Issue was linked above - [ ] Code format was applied: `make fmt` - [ ] Examples were added / adjusted (see `examples/` directory) - [x] Docs are up-to-date: `make generate-docs` (will be checked by CI) - [ ] Unit tests got implemented or updated - [ ] Acceptance tests got implemented or updated (see e.g. [here](https://github.com/stackitcloud/terraform-provider-stackit/blob/f5f99d170996b208672ae684b6da53420e369563/stackit/internal/services/dns/dns_acc_test.go)) - [x] Unit tests are passing: `make test` (will be checked by CI) - [x] No linter issues: `make lint` (will be checked by CI) Reviewed-on: https://tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pulls/34 Co-authored-by: Marcel S. Henselin Co-committed-by: Marcel S. Henselin --- .github/actions/acc_test/README.md | 1 + .github/actions/acc_test/action.yaml | 114 + .github/actions/build/action.yaml | 1 - .github/workflows/ci.yaml | 4 +- .github/workflows/release.yaml | 2 +- .github/workflows/renovate.yaml | 2 +- .github/workflows/tf-acc-test.yaml | 24 +- README.md | 4 +- cmd/cmd/build/build.go | 62 +- cmd/cmd/buildCmd.go | 11 +- docs/resources/postgresflexalpha_instance.md | 37 +- .../resource.tf | 39 +- go.mod | 22 +- go.sum | 60 +- .../instance/resource_test.go | 280 ++- .../postgresflex_acc_test.go | 296 +-- .../testdata/resource-complete.tf | 25 +- .../sqlserverflexbeta/database/datasource.go | 12 +- .../sqlserverflexbeta/flavors/datasource.go | 118 - .../flavors_data_source_gen.go | 1909 ----------------- .../sqlserverflexbeta/user/datasource.go | 25 +- stackit/internal/testutil/assert.go | 11 + stackit/internal/testutil/testutil.go | 211 +- stackit/provider_acc_test.go | 19 + 24 files changed, 858 insertions(+), 2431 deletions(-) create mode 100644 .github/actions/acc_test/README.md create mode 100644 .github/actions/acc_test/action.yaml delete mode 100644 stackit/internal/services/sqlserverflexbeta/flavors/datasource.go delete mode 100644 stackit/internal/services/sqlserverflexbeta/flavors/datasources_gen/flavors_data_source_gen.go create mode 100644 stackit/internal/testutil/assert.go diff --git a/.github/actions/acc_test/README.md b/.github/actions/acc_test/README.md new file mode 100644 index 00000000..c3484cf2 --- /dev/null +++ b/.github/actions/acc_test/README.md @@ -0,0 +1 @@ +# acceptance test action diff --git a/.github/actions/acc_test/action.yaml b/.github/actions/acc_test/action.yaml new file mode 100644 index 00000000..828e1011 --- /dev/null +++ b/.github/actions/acc_test/action.yaml @@ -0,0 +1,114 @@ +name: Acceptance Testing +description: "Acceptance Testing pipeline" + +inputs: + go-version: + description: "go version to install" + default: '1.25' + required: true + + project_id: + description: "STACKIT project ID for tests" + required: true + + region: + description: "STACKIT region for tests" + default: 'eu01' + required: true + + service_account_json: + description: "STACKIT service account JSON file contents" + required: true + + test_file: + description: "testfile to run" + default: '' + +outputs: + random-number: + description: "Random number" + value: ${{ steps.random-number-generator.outputs.random-number }} + +runs: + using: "composite" + steps: + - name: Random Number Generator + id: random-number-generator + run: echo "random-number=$(echo $RANDOM)" >> $GITHUB_OUTPUT + shell: bash + + - name: Install needed tools + shell: bash + run: | + set -e + apt-get -y -qq update + apt-get -y -qq install jq python3 python3-pip python-is-python3 s3cmd git make wget + + - name: Setup JAVA + uses: actions/setup-java@v5 + with: + distribution: 'temurin' # See 'Supported distributions' for available options + java-version: '21' + + - name: Install Go ${{ inputs.go-version }} + uses: actions/setup-go@v6 + with: + go-version: ${{ inputs.go-version }} + check-latest: true + go-version-file: 'go.mod' + + - name: Install go tools + shell: bash + run: | + set -e + go mod download + go install golang.org/x/tools/cmd/goimports@latest + go install github.com/golangci/golangci-lint/v2/cmd/golangci-lint@v2.7.2 + go install github.com/hashicorp/terraform-plugin-docs/cmd/tfplugindocs@v0.24.0 + + - name: Prepare pkg_gen directory + shell: bash + run: | + go run cmd/main.go build -p + + - name: Run acceptance test file + if: ${{ inputs.test_file != '' }} + shell: bash + run: | + echo "Running acceptance tests for the terraform provider" + echo "${STACKIT_SERVICE_ACCOUNT_JSON}" > ~/.service_account.json + cd stackit + TF_ACC=1 \ + TF_ACC_PROJECT_ID=${TF_ACC_PROJECT_ID} \ + TF_ACC_REGION=${TF_ACC_REGION} \ + go test ${{ inputs.test_file }} -count=1 -timeout=30m + env: + STACKIT_SERVICE_ACCOUNT_JSON: ${{ inputs.service_account_json }} + TF_PROJECT_ID: ${{ inputs.project_id }} + TF_ACC_REGION: ${{ inputs.region }} + # TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_EMAIL: ${{ secrets.TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_EMAIL }} + # TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_TOKEN: ${{ secrets.TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_TOKEN }} + # TF_ACC_TEST_PROJECT_PARENT_CONTAINER_ID: ${{ secrets.TF_ACC_TEST_PROJECT_PARENT_CONTAINER_ID }} + # TF_ACC_TEST_PROJECT_PARENT_UUID: ${{ secrets.TF_ACC_TEST_PROJECT_PARENT_UUID }} + # TF_ACC_TEST_PROJECT_USER_EMAIL: ${{ secrets.TF_ACC_TEST_PROJECT_USER_EMAIL }} + + - name: Run acceptance tests + if: ${{ inputs.test_file == '' }} + shell: bash + run: | + echo "Running acceptance tests for the terraform provider" + echo "${STACKIT_SERVICE_ACCOUNT_JSON}" > ~/.service_account.json + cd stackit + TF_ACC=1 \ + TF_ACC_PROJECT_ID=${TF_ACC_PROJECT_ID} \ + TF_ACC_REGION=${TF_ACC_REGION} \ + go test ./... -count=1 -timeout=30m + env: + STACKIT_SERVICE_ACCOUNT_JSON: ${{ inputs.service_account_json }} + TF_PROJECT_ID: ${{ inputs.project_id }} + TF_ACC_REGION: ${{ inputs.region }} + # TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_EMAIL: ${{ secrets.TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_EMAIL }} + # TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_TOKEN: ${{ secrets.TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_TOKEN }} + # TF_ACC_TEST_PROJECT_PARENT_CONTAINER_ID: ${{ secrets.TF_ACC_TEST_PROJECT_PARENT_CONTAINER_ID }} + # TF_ACC_TEST_PROJECT_PARENT_UUID: ${{ secrets.TF_ACC_TEST_PROJECT_PARENT_UUID }} + # TF_ACC_TEST_PROJECT_USER_EMAIL: ${{ secrets.TF_ACC_TEST_PROJECT_USER_EMAIL }} diff --git a/.github/actions/build/action.yaml b/.github/actions/build/action.yaml index fe544618..1fa83ee1 100644 --- a/.github/actions/build/action.yaml +++ b/.github/actions/build/action.yaml @@ -1,4 +1,3 @@ - name: Build description: "Build pipeline" inputs: diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index fbc3f339..186d2b42 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -106,7 +106,7 @@ jobs: needs: config steps: - name: Checkout - uses: actions/checkout@v4 + uses: actions/checkout@v6 - name: Build uses: ./.github/actions/build @@ -150,7 +150,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v4 + uses: actions/checkout@v6 - name: Check GoReleaser uses: goreleaser/goreleaser-action@v6 diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index 254c40f2..7d7106ed 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -18,7 +18,7 @@ jobs: goreleaser: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v6 with: # Allow goreleaser to access older tag information. fetch-depth: 0 diff --git a/.github/workflows/renovate.yaml b/.github/workflows/renovate.yaml index 12454b9f..90adebe6 100644 --- a/.github/workflows/renovate.yaml +++ b/.github/workflows/renovate.yaml @@ -11,7 +11,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v4 + uses: actions/checkout@v6 - name: Self-hosted Renovate uses: renovatebot/github-action@v41.0.0 with: diff --git a/.github/workflows/tf-acc-test.yaml b/.github/workflows/tf-acc-test.yaml index a8e6a53f..3b4fb061 100644 --- a/.github/workflows/tf-acc-test.yaml +++ b/.github/workflows/tf-acc-test.yaml @@ -7,21 +7,17 @@ on: workflow_dispatch: jobs: - main: + acc_test: name: Acceptance Tests runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v4 - - name: Install project tools and dependencies - run: make project-tools - - name: Run tests - run: | - make test-acceptance-tf TF_ACC_PROJECT_ID=$${{ secrets.TF_ACC_PROJECT_ID }} TF_ACC_ORGANIZATION_ID=$${{ secrets.TF_ACC_ORGANIZATION_ID }} TF_ACC_REGION="eu01" - env: - STACKIT_SERVICE_ACCOUNT_TOKEN: ${{ secrets.TF_ACC_SERVICE_ACCOUNT_TOKEN }} - TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_EMAIL: ${{ secrets.TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_EMAIL }} - TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_TOKEN: ${{ secrets.TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_TOKEN }} - TF_ACC_TEST_PROJECT_PARENT_CONTAINER_ID: ${{ secrets.TF_ACC_TEST_PROJECT_PARENT_CONTAINER_ID }} - TF_ACC_TEST_PROJECT_PARENT_UUID: ${{ secrets.TF_ACC_TEST_PROJECT_PARENT_UUID }} - TF_ACC_TEST_PROJECT_USER_EMAIL: ${{ secrets.TF_ACC_TEST_PROJECT_USER_EMAIL }} + uses: actions/checkout@v6 + + - name: Run Test + uses: ./.github/actions/acc_test + with: + go-version: ${{ env.GO_VERSION }} + project_id: ${{ vars.TEST_PROJECT_ID }} + region: 'eu01' + service_account_json: ${{ secrets.TF_ACC_SERVICE_ACCOUNT_JSON }} diff --git a/README.md b/README.md index 1da34359..018f615e 100644 --- a/README.md +++ b/README.md @@ -5,9 +5,9 @@
-# STACKIT Terraform Provider +# STACKIT Terraform Provider (PRIVATE PREVIEW) -[![Go Report Card](https://goreportcard.com/badge/github.com/stackitcloud/terraform-provider-stackit)](https://goreportcard.com/report/github.com/stackitcloud/terraform-provider-stackit) [![GitHub Release](https://img.shields.io/github/v/release/stackitcloud/terraform-provider-stackit)](https://registry.terraform.io/providers/stackitcloud/stackit/latest) ![GitHub go.mod Go version](https://img.shields.io/github/go-mod/go-version/stackitcloud/terraform-provider-stackit) [![GitHub License](https://img.shields.io/github/license/stackitcloud/terraform-provider-stackit)](https://www.apache.org/licenses/LICENSE-2.0) +[![GitHub Release](https://img.shields.io/github/v/release/stackitcloud/terraform-provider-stackit)](https://registry.terraform.io/providers/stackitcloud/stackit/latest) ![GitHub go.mod Go version](https://img.shields.io/github/go-mod/go-version/stackitcloud/terraform-provider-stackit) [![GitHub License](https://img.shields.io/github/license/stackitcloud/terraform-provider-stackit)](https://www.apache.org/licenses/LICENSE-2.0) This project is the official [Terraform Provider](https://registry.terraform.io/providers/stackitcloud/stackit/latest/docs) for [STACKIT](https://www.stackit.de/en/), which allows you to manage STACKIT resources through Terraform. diff --git a/cmd/cmd/build/build.go b/cmd/cmd/build/build.go index a6e0acf9..f210b8d2 100644 --- a/cmd/cmd/build/build.go +++ b/cmd/cmd/build/build.go @@ -39,17 +39,15 @@ type version struct { } type Builder struct { - SkipClone bool - SkipCleanup bool + SkipClone bool + SkipCleanup bool + PackagesOnly bool } func (b *Builder) Build() error { slog.Info("Starting Builder") - - slog.Info(" ... Checking needed commands available") - err := checkCommands([]string{"tfplugingen-framework", "tfplugingen-openapi"}) - if err != nil { - return err + if b.PackagesOnly { + slog.Info(" >>> only generating pkg_gen <<<") } root, err := getRoot() @@ -61,13 +59,23 @@ func (b *Builder) Build() error { } slog.Info(" ... using root directory", "dir", *root) - if !b.SkipCleanup { - slog.Info("Cleaning up old generator directory") - err = os.RemoveAll(path.Join(*root, GEN_REPO_NAME)) + if !b.PackagesOnly { + slog.Info(" ... Checking needed commands available") + err := checkCommands([]string{"tfplugingen-framework", "tfplugingen-openapi"}) if err != nil { return err } + } + if !b.SkipCleanup { + slog.Info("Cleaning up old packages directory") + err = os.RemoveAll(path.Join(*root, "pkg_gen")) + if err != nil { + return err + } + } + + if !b.SkipCleanup && !b.PackagesOnly { slog.Info("Cleaning up old packages directory") err = os.RemoveAll(path.Join(*root, "pkg_gen")) if err != nil { @@ -211,24 +219,26 @@ func (b *Builder) Build() error { } } - slog.Info("Generating service boilerplate") - err = generateServiceFiles(*root, path.Join(*root, GEN_REPO_NAME)) - if err != nil { - return err - } + if !b.PackagesOnly { + slog.Info("Generating service boilerplate") + err = generateServiceFiles(*root, path.Join(*root, GEN_REPO_NAME)) + if err != nil { + return err + } - slog.Info("Copying all service files") - err = CopyDirectory( - path.Join(*root, "generated", "internal", "services"), - path.Join(*root, "stackit", "internal", "services"), - ) - if err != nil { - return err - } + slog.Info("Copying all service files") + err = CopyDirectory( + path.Join(*root, "generated", "internal", "services"), + path.Join(*root, "stackit", "internal", "services"), + ) + if err != nil { + return err + } - err = createBoilerplate(*root, path.Join(*root, "stackit", "internal", "services")) - if err != nil { - return err + err = createBoilerplate(*root, path.Join(*root, "stackit", "internal", "services")) + if err != nil { + return err + } } if !b.SkipCleanup { diff --git a/cmd/cmd/buildCmd.go b/cmd/cmd/buildCmd.go index 0a239215..8902132e 100644 --- a/cmd/cmd/buildCmd.go +++ b/cmd/cmd/buildCmd.go @@ -6,8 +6,9 @@ import ( ) var ( - skipCleanup bool - skipClone bool + skipCleanup bool + skipClone bool + packagesOnly bool ) var buildCmd = &cobra.Command{ @@ -16,8 +17,9 @@ var buildCmd = &cobra.Command{ Long: `...`, RunE: func(cmd *cobra.Command, args []string) error { b := build.Builder{ - SkipClone: skipClone, - SkipCleanup: skipCleanup, + SkipClone: skipClone, + SkipCleanup: skipCleanup, + PackagesOnly: packagesOnly, } return b.Build() }, @@ -30,4 +32,5 @@ func NewBuildCmd() *cobra.Command { func init() { // nolint: gochecknoinits buildCmd.Flags().BoolVarP(&skipCleanup, "skip-clean", "c", false, "Skip cleanup steps") buildCmd.Flags().BoolVarP(&skipClone, "skip-clone", "g", false, "Skip cloning from git") + buildCmd.Flags().BoolVarP(&packagesOnly, "packages-only", "p", false, "Only generate packages") } diff --git a/docs/resources/postgresflexalpha_instance.md b/docs/resources/postgresflexalpha_instance.md index 9ec697ba..fb735ecc 100644 --- a/docs/resources/postgresflexalpha_instance.md +++ b/docs/resources/postgresflexalpha_instance.md @@ -13,21 +13,29 @@ description: |- ## Example Usage ```terraform -resource "stackitprivatepreview_postgresflexalpha_instance" "example" { +resource "stackitprivatepreview_postgresflexalpha_instance" "msh-instance-only" { project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" name = "example-instance" acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"] - backup_schedule = "00 00 * * *" - flavor = { - cpu = 2 - ram = 4 - } - replicas = 3 + backup_schedule = "0 0 * * *" + retention_days = 30 + flavor_id = "flavor.id" + replicas = 1 storage = { - class = "class" - size = 5 + performance_class = "premium-perf2-stackit" + size = 10 } - version = 14 + encryption = { + kek_key_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" + kek_key_ring_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" + kek_key_version = 1 + service_account = "service@account.email" + } + network = { + acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"] + access_scope = "PUBLIC" + } + version = 17 } # Only use the import statement, if you want to import an existing postgresflex instance @@ -35,6 +43,15 @@ import { to = stackitprivatepreview_postgresflexalpha_instance.import-example id = "${var.project_id},${var.region},${var.postgres_instance_id}" } + +import { + to = stackitprivatepreview_postgresflexalpha_instance.import-example + identity = { + project_id = var.project_id + region = var.region + instance_id = var.postgres_instance_id + } +} ``` diff --git a/examples/resources/stackitprivatepreview_postgresflexalpha_instance/resource.tf b/examples/resources/stackitprivatepreview_postgresflexalpha_instance/resource.tf index 99faf2e7..1f98284a 100644 --- a/examples/resources/stackitprivatepreview_postgresflexalpha_instance/resource.tf +++ b/examples/resources/stackitprivatepreview_postgresflexalpha_instance/resource.tf @@ -1,22 +1,39 @@ -resource "stackitprivatepreview_postgresflexalpha_instance" "example" { +resource "stackitprivatepreview_postgresflexalpha_instance" "msh-instance-only" { project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" name = "example-instance" acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"] - backup_schedule = "00 00 * * *" - flavor = { - cpu = 2 - ram = 4 - } - replicas = 3 + backup_schedule = "0 0 * * *" + retention_days = 30 + flavor_id = "flavor.id" + replicas = 1 storage = { - class = "class" - size = 5 + performance_class = "premium-perf2-stackit" + size = 10 } - version = 14 + encryption = { + kek_key_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" + kek_key_ring_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" + kek_key_version = 1 + service_account = "service@account.email" + } + network = { + acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"] + access_scope = "PUBLIC" + } + version = 17 } # Only use the import statement, if you want to import an existing postgresflex instance import { to = stackitprivatepreview_postgresflexalpha_instance.import-example id = "${var.project_id},${var.region},${var.postgres_instance_id}" -} \ No newline at end of file +} + +import { + to = stackitprivatepreview_postgresflexalpha_instance.import-example + identity = { + project_id = var.project_id + region = var.region + instance_id = var.postgres_instance_id + } +} diff --git a/go.mod b/go.mod index 10aca46d..fc21ec9e 100644 --- a/go.mod +++ b/go.mod @@ -13,6 +13,7 @@ require ( github.com/hashicorp/terraform-plugin-testing v1.14.0 github.com/iancoleman/strcase v0.3.0 github.com/ivanpirog/coloredcobra v1.0.1 + github.com/joho/godotenv v1.5.1 github.com/ldez/go-git-cmd-wrapper/v2 v2.9.1 github.com/spf13/cobra v1.10.2 github.com/stackitcloud/stackit-sdk-go/core v0.21.0 @@ -29,22 +30,13 @@ require ( require ( dario.cat/mergo v1.0.1 // indirect - github.com/BurntSushi/toml v1.2.1 // indirect - github.com/Kunde21/markdownfmt/v3 v3.1.0 // indirect - github.com/Masterminds/goutils v1.1.1 // indirect - github.com/Masterminds/semver/v3 v3.2.0 // indirect - github.com/Masterminds/sprig/v3 v3.2.3 // indirect github.com/ProtonMail/go-crypto v1.3.0 // indirect github.com/agext/levenshtein v1.2.3 // indirect github.com/apparentlymart/go-textseg/v15 v15.0.0 // indirect - github.com/armon/go-radix v1.0.0 // indirect - github.com/bgentry/speakeasy v0.1.0 // indirect - github.com/bmatcuk/doublestar/v4 v4.9.1 // indirect github.com/cloudflare/circl v1.6.2 // indirect github.com/fatih/color v1.18.0 // indirect github.com/golang-jwt/jwt/v5 v5.3.0 // indirect github.com/golang/protobuf v1.5.4 // indirect - github.com/hashicorp/cli v1.1.7 // indirect github.com/hashicorp/errwrap v1.1.0 // indirect github.com/hashicorp/go-checkpoint v0.5.0 // indirect github.com/hashicorp/go-cleanhttp v0.5.2 // indirect @@ -59,38 +51,27 @@ require ( github.com/hashicorp/logutils v1.0.0 // indirect github.com/hashicorp/terraform-exec v0.24.0 // indirect github.com/hashicorp/terraform-json v0.27.2 // indirect - github.com/hashicorp/terraform-plugin-docs v0.24.0 // indirect github.com/hashicorp/terraform-plugin-sdk/v2 v2.38.1 // indirect github.com/hashicorp/terraform-registry-address v0.4.0 // indirect github.com/hashicorp/terraform-svchost v0.2.0 // indirect github.com/hashicorp/yamux v0.1.2 // indirect - github.com/huandu/xstrings v1.3.3 // indirect - github.com/imdario/mergo v0.3.15 // indirect github.com/inconshreveable/mousetrap v1.1.0 // indirect github.com/kr/text v0.2.0 // indirect github.com/mattn/go-colorable v0.1.14 // indirect github.com/mattn/go-isatty v0.0.20 // indirect - github.com/mattn/go-runewidth v0.0.9 // indirect github.com/mitchellh/copystructure v1.2.0 // indirect github.com/mitchellh/go-testing-interface v1.14.1 // indirect github.com/mitchellh/go-wordwrap v1.0.1 // indirect github.com/mitchellh/mapstructure v1.5.0 // indirect github.com/mitchellh/reflectwalk v1.0.2 // indirect github.com/oklog/run v1.2.0 // indirect - github.com/posener/complete v1.2.3 // indirect - github.com/shopspring/decimal v1.3.1 // indirect - github.com/spf13/cast v1.5.0 // indirect github.com/spf13/pflag v1.0.10 // indirect github.com/stretchr/testify v1.11.1 // indirect github.com/vmihailenco/msgpack v4.0.4+incompatible // indirect github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect - github.com/yuin/goldmark v1.7.7 // indirect - github.com/yuin/goldmark-meta v1.1.0 // indirect github.com/zclconf/go-cty v1.17.0 // indirect - go.abhg.dev/goldmark/frontmatter v0.2.0 // indirect golang.org/x/crypto v0.47.0 // indirect - golang.org/x/exp v0.0.0-20230626212559-97b1e661b5df // indirect golang.org/x/mod v0.32.0 // indirect golang.org/x/net v0.49.0 // indirect golang.org/x/sync v0.19.0 // indirect @@ -101,7 +82,6 @@ require ( google.golang.org/genproto/googleapis/rpc v0.0.0-20260120221211-b8f7ae30c516 // indirect google.golang.org/grpc v1.78.0 // indirect google.golang.org/protobuf v1.36.11 // indirect - gopkg.in/yaml.v2 v2.4.0 // indirect ) tool golang.org/x/tools/cmd/goimports diff --git a/go.sum b/go.sum index 59906446..43e5e03e 100644 --- a/go.sum +++ b/go.sum @@ -1,15 +1,5 @@ dario.cat/mergo v1.0.1 h1:Ra4+bf83h2ztPIQYNP99R6m+Y7KfnARDfID+a+vLl4s= dario.cat/mergo v1.0.1/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk= -github.com/BurntSushi/toml v1.2.1 h1:9F2/+DoOYIOksmaJFPw1tGFy1eDnIJXg+UHjuD8lTak= -github.com/BurntSushi/toml v1.2.1/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ= -github.com/Kunde21/markdownfmt/v3 v3.1.0 h1:KiZu9LKs+wFFBQKhrZJrFZwtLnCCWJahL+S+E/3VnM0= -github.com/Kunde21/markdownfmt/v3 v3.1.0/go.mod h1:tPXN1RTyOzJwhfHoon9wUr4HGYmWgVxSQN6VBJDkrVc= -github.com/Masterminds/goutils v1.1.1 h1:5nUrii3FMTL5diU80unEVvNevw1nH4+ZV4DSLVJLSYI= -github.com/Masterminds/goutils v1.1.1/go.mod h1:8cTjp+g8YejhMuvIA5y2vz3BpJxksy863GQaJW2MFNU= -github.com/Masterminds/semver/v3 v3.2.0 h1:3MEsd0SM6jqZojhjLWWeBY+Kcjy9i6MQAeY7YgDP83g= -github.com/Masterminds/semver/v3 v3.2.0/go.mod h1:qvl/7zhW3nngYb5+80sSMF+FG2BjYrf8m9wsX0PNOMQ= -github.com/Masterminds/sprig/v3 v3.2.3 h1:eL2fZNezLomi0uOLqjQoN6BfsDD+fyLtgbJMAj9n6YA= -github.com/Masterminds/sprig/v3 v3.2.3/go.mod h1:rXcFaZ2zZbLRJv/xSysmlgIM1u11eBaRMhvYXJNkGuM= github.com/MatusOllah/slogcolor v1.7.0 h1:Nrd7yBPv2EBEEBEwl7WEPRmMd1ozZzw2jm8SLMYDbKs= github.com/MatusOllah/slogcolor v1.7.0/go.mod h1:5y1H50XuQIBvuYTJlmokWi+4FuPiJN5L7Z0jM4K4bYA= github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY= @@ -21,12 +11,6 @@ github.com/agext/levenshtein v1.2.3/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki github.com/apparentlymart/go-textseg/v12 v12.0.0/go.mod h1:S/4uRK2UtaQttw1GenVJEynmyUenKwP++x/+DdGV/Ec= github.com/apparentlymart/go-textseg/v15 v15.0.0 h1:uYvfpb3DyLSCGWnctWKGj857c6ew1u1fNQOlOtuGxQY= github.com/apparentlymart/go-textseg/v15 v15.0.0/go.mod h1:K8XmNZdhEBkdlyDdvbmmsvpAG721bKi0joRfFdHIWJ4= -github.com/armon/go-radix v1.0.0 h1:F4z6KzEeeQIMeLFa97iZU6vupzoecKdU5TX24SNppXI= -github.com/armon/go-radix v1.0.0/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= -github.com/bgentry/speakeasy v0.1.0 h1:ByYyxL9InA1OWqxJqqp2A5pYHUrCiAL6K3J+LKSsQkY= -github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs= -github.com/bmatcuk/doublestar/v4 v4.9.1 h1:X8jg9rRZmJd4yRy7ZeNDRnM+T3ZfHv15JiBJ/avrEXE= -github.com/bmatcuk/doublestar/v4 v4.9.1/go.mod h1:xBQ8jztBU6kakFMg+8WGxn0c6z1fTSPVIjEY1Wr7jzc= github.com/bufbuild/protocompile v0.14.1 h1:iA73zAf/fyljNjQKwYzUHD6AD4R8KMasmwa/FBatYVw= github.com/bufbuild/protocompile v0.14.1/go.mod h1:ppVdAIhbr2H8asPk6k4pY7t9zB1OU5DoEw9xY/FUi1c= github.com/cloudflare/circl v1.6.2 h1:hL7VBpHHKzrV5WTfHCaBsgx/HGbBYlgrwvNXEVDYYsQ= @@ -70,11 +54,8 @@ github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMyw github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= -github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= -github.com/hashicorp/cli v1.1.7 h1:/fZJ+hNdwfTSfsxMBa9WWMlfjUZbX8/LnUxgAd7lCVU= -github.com/hashicorp/cli v1.1.7/go.mod h1:e6Mfpga9OCT1vqzFuoGZiiF/KaG9CbUfO5s3ghU3YgU= github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I= github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= @@ -87,7 +68,6 @@ github.com/hashicorp/go-cty v1.5.0 h1:EkQ/v+dDNUqnuVpmS5fPqyY71NXVgT5gf32+57xY8g github.com/hashicorp/go-cty v1.5.0/go.mod h1:lFUCG5kd8exDobgSfyj4ONE/dc822kiYMguVKdHGMLM= github.com/hashicorp/go-hclog v1.6.3 h1:Qr2kF+eVWjTiYmU7Y31tYlP1h0q/X3Nl3tPGdaB11/k= github.com/hashicorp/go-hclog v1.6.3/go.mod h1:W4Qnvbt70Wk/zYJryRzDRU/4r0kIg0PVHBcfoyhpF5M= -github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk= github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo= github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= github.com/hashicorp/go-plugin v1.7.0 h1:YghfQH/0QmPNc/AZMTFE3ac8fipZyZECHdDPshfk+mA= @@ -109,8 +89,6 @@ github.com/hashicorp/terraform-exec v0.24.0 h1:mL0xlk9H5g2bn0pPF6JQZk5YlByqSqrO5 github.com/hashicorp/terraform-exec v0.24.0/go.mod h1:lluc/rDYfAhYdslLJQg3J0oDqo88oGQAdHR+wDqFvo4= github.com/hashicorp/terraform-json v0.27.2 h1:BwGuzM6iUPqf9JYM/Z4AF1OJ5VVJEEzoKST/tRDBJKU= github.com/hashicorp/terraform-json v0.27.2/go.mod h1:GzPLJ1PLdUG5xL6xn1OXWIjteQRT2CNT9o/6A9mi9hE= -github.com/hashicorp/terraform-plugin-docs v0.24.0 h1:YNZYd+8cpYclQyXbl1EEngbld8w7/LPOm99GD5nikIU= -github.com/hashicorp/terraform-plugin-docs v0.24.0/go.mod h1:YLg+7LEwVmRuJc0EuCw0SPLxuQXw5mW8iJ5ml/kvi+o= github.com/hashicorp/terraform-plugin-framework v1.17.0 h1:JdX50CFrYcYFY31gkmitAEAzLKoBgsK+iaJjDC8OexY= github.com/hashicorp/terraform-plugin-framework v1.17.0/go.mod h1:4OUXKdHNosX+ys6rLgVlgklfxN3WHR5VHSOABeS/BM0= github.com/hashicorp/terraform-plugin-framework-validators v0.19.0 h1:Zz3iGgzxe/1XBkooZCewS0nJAaCFPFPHdNJd8FgE4Ow= @@ -129,13 +107,8 @@ github.com/hashicorp/terraform-svchost v0.2.0 h1:wVc2vMiodOHvNZcQw/3y9af1XSomgjG github.com/hashicorp/terraform-svchost v0.2.0/go.mod h1:/98rrS2yZsbppi4VGVCjwYmh8dqsKzISqK7Hli+0rcQ= github.com/hashicorp/yamux v0.1.2 h1:XtB8kyFOyHXYVFnwT5C3+Bdo8gArse7j2AQ0DA0Uey8= github.com/hashicorp/yamux v0.1.2/go.mod h1:C+zze2n6e/7wshOZep2A70/aQU6QBRWJO/G6FT1wIns= -github.com/huandu/xstrings v1.3.3 h1:/Gcsuc1x8JVbJ9/rlye4xZnVAbEkGauT8lbebqcQws4= -github.com/huandu/xstrings v1.3.3/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE= github.com/iancoleman/strcase v0.3.0 h1:nTXanmYxhfFAMjZL34Ov6gkzEsSJZ5DbhxWjvSASxEI= github.com/iancoleman/strcase v0.3.0/go.mod h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47ZCWhYzw7ho= -github.com/imdario/mergo v0.3.11/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA= -github.com/imdario/mergo v0.3.15 h1:M8XP7IuFNsqUx6VPK2P9OSmsYsI/YFaGil0uD21V3dM= -github.com/imdario/mergo v0.3.15/go.mod h1:WBLT9ZmE3lPoWsEzCh9LPo3TiwVN+ZKEjmz+hD27ysY= github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= @@ -145,6 +118,8 @@ github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 h1:BQSFePA1RWJOl github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99/go.mod h1:1lJo3i6rXxKeerYnT8Nvf0QmHCRC1n8sfWVwXF2Frvo= github.com/jhump/protoreflect v1.17.0 h1:qOEr613fac2lOuTgWN4tPAtLL7fUSbuJL5X5XumQh94= github.com/jhump/protoreflect v1.17.0/go.mod h1:h9+vUUL38jiBzck8ck+6G/aeMX8Z4QUY/NiJPwPNi+8= +github.com/joho/godotenv v1.5.1 h1:7eLL/+HRGLY0ldzfGMeQkb7vMd0as4CfYvUVzLqw0N0= +github.com/joho/godotenv v1.5.1/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4= github.com/kevinburke/ssh_config v1.2.0 h1:x584FjTGwHzMwvHx18PXxbBVzfnxogHaAReU4gf13a4= github.com/kevinburke/ssh_config v1.2.0/go.mod h1:CT57kijsi8u/K/BOFA39wgDQJ9CxiF4nAY/ojJ6r6mM= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= @@ -164,9 +139,6 @@ github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Ky github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= -github.com/mattn/go-runewidth v0.0.9 h1:Lm995f3rfxdpd6TSmuVCHVb/QhupuXlYr8sCI/QdE+0= -github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI= -github.com/mitchellh/copystructure v1.0.0/go.mod h1:SNtv71yrdKgLRyLFxmLdkAbkKEFWgYaq1OVrnRcwhnw= github.com/mitchellh/copystructure v1.2.0 h1:vpKXTN4ewci03Vljg/q9QvCGUDttBOGBIa15WveJJGw= github.com/mitchellh/copystructure v1.2.0/go.mod h1:qLl+cE2AmVv+CoeAwDPye/v+N2HKCj9FbZEVFJRxO9s= github.com/mitchellh/go-testing-interface v1.14.1 h1:jrgshOhYAUVNMAJiKbEu7EqAwgJJ2JqpQmpLJOu07cU= @@ -175,7 +147,6 @@ github.com/mitchellh/go-wordwrap v1.0.1 h1:TLuKupo69TCn6TQSyGxwI1EblZZEsQ0vMlAFQ github.com/mitchellh/go-wordwrap v1.0.1/go.mod h1:R62XHJLzvMFRBbcrT7m7WgmE1eOyTSsCt+hzestvNj0= github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= -github.com/mitchellh/reflectwalk v1.0.0/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= github.com/mitchellh/reflectwalk v1.0.2 h1:G2LzWKi524PWgd3mLHV8Y5k7s6XUvT0Gef6zxSIeXaQ= github.com/mitchellh/reflectwalk v1.0.2/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= github.com/oklog/run v1.2.0 h1:O8x3yXwah4A73hJdlrwo/2X6J62gE5qTMusH0dvz60E= @@ -185,21 +156,13 @@ github.com/pjbgf/sha1cd v0.3.2/go.mod h1:zQWigSxVmsHEZow5qaLtPYxpcKMMQpa09ixqBxu github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U= github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/posener/complete v1.2.3 h1:NP0eAhjcjImqslEwo/1hq7gpajME0fTLTezBKDqfXqo= -github.com/posener/complete v1.2.3/go.mod h1:WZIdtGGp+qx0sLrYKtIRAruyNpv6hFCicSgv7Sy7s/s= github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ= github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc= github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 h1:n661drycOFuPLCN3Uc8sB6B/s6Z4t2xvBgU1htSHuq8= github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3/go.mod h1:A0bzQcvG0E7Rwjx0REVgAGH58e96+X0MeOfepqsbeW4= -github.com/shopspring/decimal v1.2.0/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= -github.com/shopspring/decimal v1.3.1 h1:2Usl1nmF/WZucqkFZhnfFYxxxu8LG21F6nPQBE5gKV8= -github.com/shopspring/decimal v1.3.1/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= github.com/skeema/knownhosts v1.3.1 h1:X2osQ+RAjK76shCbvhHHHVl3ZlgDm8apHEHFqRjnBY8= github.com/skeema/knownhosts v1.3.1/go.mod h1:r7KTdC8l4uxWRyK2TpQZ/1o5HaSzh06ePQNxPwTcfiY= -github.com/spf13/cast v1.3.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= -github.com/spf13/cast v1.5.0 h1:rj3WzYc11XZaIZMPKmwP96zkFEnnAmV8s6XbB2aY32w= -github.com/spf13/cast v1.5.0/go.mod h1:SpXXQ5YoyJw6s3/6cMTQuxvgRl3PCJiyaX9p6b155UU= github.com/spf13/cobra v1.4.0/go.mod h1:Wo4iy3BUC+X2Fybo0PDqwJIv3dNRiZLHQymsfxlB84g= github.com/spf13/cobra v1.10.2 h1:DMTTonx5m65Ic0GOoRY2c16WCbHxOOw6xxezuLaBpcU= github.com/spf13/cobra v1.10.2/go.mod h1:7C1pvHqHw5A4vrJfjNwvOdzYu0Gml16OCs2GRiTUUS4= @@ -214,9 +177,6 @@ github.com/stackitcloud/stackit-sdk-go/services/iaasalpha v0.1.23-alpha/go.mod h github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex v1.4.1 h1:6MJdy1xmdE+uOo/F8mR5HSldjPSHpdhwuqS3u9m2EWQ= github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex v1.4.1/go.mod h1:XLr3ZfrT1g8ZZMm7A6RXOPBuhBkikdUN2o/+/Y+Hu+g= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= -github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= -github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= -github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= github.com/stretchr/testify v1.7.2/go.mod h1:R6va5+xMeoiuVRoj+gSkQ7d3FALtqAAGI1FQKckRals= github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U= github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U= @@ -232,16 +192,10 @@ github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV github.com/xanzy/ssh-agent v0.3.3 h1:+/15pJfg/RsTxqYcX6fHqOXZwwMP+2VyYWJeWM2qQFM= github.com/xanzy/ssh-agent v0.3.3/go.mod h1:6dzNDKs0J9rVPHPhaGCukekBHKqfl+L3KghI1Bc68Uw= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= -github.com/yuin/goldmark v1.7.7 h1:5m9rrB1sW3JUMToKFQfb+FGt1U7r57IHu5GrYrG2nqU= -github.com/yuin/goldmark v1.7.7/go.mod h1:uzxRWxtg69N339t3louHJ7+O03ezfj6PlliRlaOzY1E= -github.com/yuin/goldmark-meta v1.1.0 h1:pWw+JLHGZe8Rk0EGsMVssiNb/AaPMHfSRszZeUeiOUc= -github.com/yuin/goldmark-meta v1.1.0/go.mod h1:U4spWENafuA7Zyg+Lj5RqK/MF+ovMYtBvXi1lBb2VP0= github.com/zclconf/go-cty v1.17.0 h1:seZvECve6XX4tmnvRzWtJNHdscMtYEx5R7bnnVyd/d0= github.com/zclconf/go-cty v1.17.0/go.mod h1:wqFzcImaLTI6A5HfsRwB0nj5n0MRZFwmey8YoFPPs3U= github.com/zclconf/go-cty-debug v0.0.0-20240509010212-0d6042c53940 h1:4r45xpDWB6ZMSMNJFMOjqrGHynW3DIBuR2H9j0ug+Mo= github.com/zclconf/go-cty-debug v0.0.0-20240509010212-0d6042c53940/go.mod h1:CmBdvvj3nqzfzJ6nTCIwDTPZ56aVGvDrmztiO5g3qrM= -go.abhg.dev/goldmark/frontmatter v0.2.0 h1:P8kPG0YkL12+aYk2yU3xHv4tcXzeVnN+gU0tJ5JnxRw= -go.abhg.dev/goldmark/frontmatter v0.2.0/go.mod h1:XqrEkZuM57djk7zrlRUB02x8I5J0px76YjkOzhB4YlU= go.opentelemetry.io/auto/sdk v1.2.1 h1:jXsnJ4Lmnqd11kwkBV2LgLoFMZKizbCi5fNZ/ipaZ64= go.opentelemetry.io/auto/sdk v1.2.1/go.mod h1:KRTj+aOaElaLi+wW1kO/DZRXwkF4C5xPbEe3ZiIhN7Y= go.opentelemetry.io/otel v1.38.0 h1:RkfdswUDRimDg0m2Az18RKOsnI8UDzppJAtj01/Ymk8= @@ -257,11 +211,8 @@ go.opentelemetry.io/otel/trace v1.38.0/go.mod h1:j1P9ivuFsTceSWe1oY+EeW3sc+Pp42s go.yaml.in/yaml/v3 v3.0.4/go.mod h1:DhzuOOF2ATzADvBadXxruRBLzYTpT36CKvDb3+aBEFg= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/crypto v0.3.0/go.mod h1:hebNnKkNXi2UzZN1eVRvBB7co0a+JxK6XbPiWVs/3J4= golang.org/x/crypto v0.47.0 h1:V6e3FRj+n4dbpw86FJ8Fv7XVOql7TEwpHapKoMJ/GO8= golang.org/x/crypto v0.47.0/go.mod h1:ff3Y9VzzKbwSSEzWqJsJVBnWmRwRSHt/6Op5n9bQc4A= -golang.org/x/exp v0.0.0-20230626212559-97b1e661b5df h1:UA2aFVmmsIlefxMk29Dp2juaUSth8Pyn3Tq5Y5mJGME= -golang.org/x/exp v0.0.0-20230626212559-97b1e661b5df/go.mod h1:FXUEEKJgO7OQYeo8N01OfiKP8RXMtf6e8aTskBGqWdc= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= golang.org/x/mod v0.32.0 h1:9F4d3PHLljb6x//jOyokMv3eX+YDeepZSEo3mFJy93c= golang.org/x/mod v0.32.0/go.mod h1:SgipZ/3h2Ci89DlEtEXWUk/HteuRin+HHhN+WbNhguU= @@ -269,7 +220,6 @@ golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= -golang.org/x/net v0.2.0/go.mod h1:KqCZLdyyvdV855qA2rE3GC2aiw5xGR5TEjj8smXukLY= golang.org/x/net v0.49.0 h1:eeHFmOGUTtaaPSGNmjBKpbng9MulQsJURQUAfUwY++o= golang.org/x/net v0.49.0/go.mod h1:/ysNB2EvaqvesRkuLAyjI1ycPZlQHM3q01F02UY/MV8= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -287,7 +237,6 @@ golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20220503163025-988cb79eb6c6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.40.0 h1:DBZZqJ2Rkml6QMQsZywtnjnnGvHza6BTfYFWY9kjEWQ= golang.org/x/sys v0.40.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= @@ -295,14 +244,12 @@ golang.org/x/telemetry v0.0.0-20260116145544-c6413dc483f5 h1:i0p03B68+xC1kD2QUO8 golang.org/x/telemetry v0.0.0-20260116145544-c6413dc483f5/go.mod h1:b7fPSJ0pKZ3ccUh8gnTONJxhn3c/PS6tyzQvyqw4iA8= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= -golang.org/x/term v0.2.0/go.mod h1:TVmDHMZPmdnySmBfhjOoOdhjzdE1h4u1VwSiw2l1Nuc= golang.org/x/term v0.39.0 h1:RclSuaJf32jOqZz74CkPA9qFuVTX7vhLlpfj/IGWlqY= golang.org/x/term v0.39.0/go.mod h1:yxzUCTP/U+FzoxfdKmLaA0RV1WgE0VY7hXBwKtY/4ww= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ= -golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.33.0 h1:B3njUFyqtHDUI5jMn1YIr5B0IE2U0qck04r6d4KPAxE= golang.org/x/text v0.33.0/go.mod h1:LuMebE6+rBincTi9+xWTY8TztLzKHc/9C1uBCG27+q8= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= @@ -331,9 +278,6 @@ gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntN gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= gopkg.in/warnings.v0 v0.1.2 h1:wFXVbFY8DY5/xOe1ECiWdKCzZlxgshcYVNkBHstARME= gopkg.in/warnings.v0 v0.1.2/go.mod h1:jksf8JmL6Qr/oQM2OXTHunEvvTAsrWBLb6OOjuVWRNI= -gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/stackit/internal/services/postgresflexalpha/instance/resource_test.go b/stackit/internal/services/postgresflexalpha/instance/resource_test.go index 46d935a5..c84c1a5d 100644 --- a/stackit/internal/services/postgresflexalpha/instance/resource_test.go +++ b/stackit/internal/services/postgresflexalpha/instance/resource_test.go @@ -1,12 +1,233 @@ package postgresflexalpha import ( - "reflect" + "encoding/json" + "fmt" + "net/http" + "net/http/httptest" + "os" "testing" - "github.com/hashicorp/terraform-plugin-framework/resource" + "github.com/hashicorp/terraform-plugin-testing/helper/acctest" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/terraform" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/testutil" ) +var ( + validFlavor = "2.4" + kekKeyRingId = "" + kekKeyVersion = "" + kekKeySA = "" +) + +func testAccPreCheck(t *testing.T) { + // TODO: if needed ... +} + +//func TestAccResourceExample_parallel(t *testing.T) { +// t.Parallel() +// +// exData := resData{ +// Region: "eu01", +// ServiceAccountFilePath: sa_file, +// ProjectID: project_id, +// Name: acctest.RandomWithPrefix("tf-acc"), +// } +// +// resource.Test(t, resource.TestCase{ +// ProtoV6ProviderFactories: testutil.TestAccProtoV6ProviderFactories, +// Steps: []resource.TestStep{ +// { +// Config: testAccResourceEncryptionExampleConfig(exData), +// Check: resource.TestCheckResourceAttrSet("example_resource.test", "id"), +// }, +// }, +// }) +//} + +type resData struct { + ServiceAccountFilePath string + ProjectID string + Region string + Name string + Flavor string + BackupSchedule string + RetentionDays uint32 +} + +func getExample() resData { + return resData{ + Region: testutil.Region, + ServiceAccountFilePath: testutil.ServiceAccountFile, + ProjectID: testutil.ProjectId, + Name: acctest.RandomWithPrefix("tf-acc"), + Flavor: "2.4", + BackupSchedule: "0 0 * * *", + RetentionDays: 33, + } +} + +func TestAccResourceExample_basic(t *testing.T) { + exData := getExample() + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + ProtoV6ProviderFactories: testutil.TestAccProtoV6ProviderFactories, + Steps: []resource.TestStep{ + { + Config: testAccResourceNoEncryptionExampleConfig(exData), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr("example_resource.test", "name", exData.Name), + resource.TestCheckResourceAttrSet("example_resource.test", "id"), + ), + }, + //// Create and verify + //{ + // Config: testAccResourceNoEncryptionExampleConfig(exData), + // Check: resource.ComposeTestCheckFunc( + // resource.TestCheckResourceAttr("example_resource.test", "name", exData.Name), + // ), + //}, + //// Update and verify + //{ + // Config: testAccResourceNoEncryptionExampleConfig(exData), + // Check: resource.ComposeTestCheckFunc( + // resource.TestCheckResourceAttr("example_resource.test", "name", exData.Name), + // ), + //}, + // Import test + { + ResourceName: "example_resource.test", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccResourceNoEncryptionExampleConfig(data resData) string { + return fmt.Sprintf(` + +%[1]s + +resource "stackitprivatepreview_postgresflexalpha_instance" "test" { + project_id = %[2]q + name = %[3]q + backup_schedule = %[4]q + retention_days = %[5]d + flavor_id = %[6]q + replicas = 1 + storage = { + performance_class = "premium-perf2-stackit" + size = 10 + } + network = { + acl = ["0.0.0.0/0"] + access_scope = "PUBLIC" + } + version = 17 +} + +`, + testutil.PostgresFlexProviderConfig(data.ServiceAccountFilePath), + data.ProjectID, + data.Name, + data.BackupSchedule, + data.RetentionDays, + data.Flavor, + data.Name, + ) +} + +func testAccResourceEncryptionExampleConfig(data resData) string { + return fmt.Sprintf(` + +%[1]s + +resource "stackitprivatepreview_postgresflexalpha_instance" "test" { + project_id = %[2]q + name = %[3]q + backup_schedule = "0 0 * * *" + retention_days = 45 + flavor_id = "2.1" + replicas = 1 + storage = { + performance_class = "premium-perf2-stackit" + size = 10 + } + encryption = { + kek_key_id = "key01" + kek_key_ring_id = "key_ring_01" + kek_key_version = 1 + service_account = "service@account.email" + } + network = { + acl = ["0.0.0.0/0"] + access_scope = "PUBLIC" + } + version = 14 +} + +`, + testutil.PostgresFlexProviderConfig(data.ServiceAccountFilePath), + data.ProjectID, + data.Name, + ) +} + +func testCheckResourceExists(resourceName string) resource.TestCheckFunc { + return func(s *terraform.State) error { + rs, ok := s.RootModule().Resources[resourceName] + if !ok { + return fmt.Errorf("resource not found: %s", resourceName) + } + + if rs.Primary.ID == "" { + return fmt.Errorf("resource ID not set") + } + + // Verify resource exists in the API + //client := testAccProvider.Meta().(*APIClient) + //_, err := client.GetResource(rs.Primary.ID) + //if err != nil { + // return fmt.Errorf("error fetching resource: %w", err) + //} + + return nil + } +} + +func setupMockServer() *httptest.Server { + mux := http.NewServeMux() + + mux.HandleFunc("/api/resources", func(w http.ResponseWriter, r *http.Request) { + switch r.Method { + case http.MethodPost: + w.WriteHeader(http.StatusCreated) + json.NewEncoder(w).Encode(map[string]string{ + "id": "mock-id-123", + "name": "test-resource", + }) + case http.MethodGet: + w.WriteHeader(http.StatusOK) + json.NewEncoder(w).Encode([]map[string]string{}) + } + }) + + return httptest.NewServer(mux) +} + +func TestUnitResourceCreate(t *testing.T) { + server := setupMockServer() + defer server.Close() + + // Configure provider to use mock server URL + os.Setenv("API_ENDPOINT", server.URL) + + // Run unit tests against mock +} + // type postgresFlexClientMocked struct { // returnError bool // getFlavorsResp *postgresflex.GetFlavorsResponse @@ -20,21 +241,40 @@ import ( // return c.getFlavorsResp, nil // } -func TestNewInstanceResource(t *testing.T) { - tests := []struct { - name string - want resource.Resource - }{ - { - name: "create empty instance resource", - want: &instanceResource{}, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - if got := NewInstanceResource(); !reflect.DeepEqual(got, tt.want) { - t.Errorf("NewInstanceResource() = %v, want %v", got, tt.want) - } - }) - } -} +//func TestNewInstanceResource(t *testing.T) { +// exData := resData{ +// Region: "eu01", +// ServiceAccountFilePath: sa_file, +// ProjectID: project_id, +// Name: "testRes", +// } +// resource.Test(t, resource.TestCase{ +// ProtoV6ProviderFactories: testutil.TestAccProtoV6ProviderFactories, +// Steps: []resource.TestStep{ +// { +// Config: testAccResourceEncryptionExampleConfig(exData), +// Check: resource.ComposeAggregateTestCheckFunc( +// resource.TestCheckResourceAttr("example_resource.test", "name", exData.Name), +// resource.TestCheckResourceAttrSet("example_resource.test", "id"), +// ), +// }, +// }, +// }) +// +// //tests := []struct { +// // name string +// // want resource.Resource +// //}{ +// // { +// // name: "create empty instance resource", +// // want: &instanceResource{}, +// // }, +// //} +// //for _, tt := range tests { +// // t.Run(tt.name, func(t *testing.T) { +// // if got := NewInstanceResource(); !reflect.DeepEqual(got, tt.want) { +// // t.Errorf("NewInstanceResource() = %v, want %v", got, tt.want) +// // } +// // }) +// //} +//} diff --git a/stackit/internal/services/postgresflexalpha/postgresflex_acc_test.go b/stackit/internal/services/postgresflexalpha/postgresflex_acc_test.go index a2920107..2a8a12b5 100644 --- a/stackit/internal/services/postgresflexalpha/postgresflex_acc_test.go +++ b/stackit/internal/services/postgresflexalpha/postgresflex_acc_test.go @@ -6,12 +6,15 @@ import ( "context" _ "embed" "fmt" + "log/slog" + "os" "strings" "testing" "github.com/hashicorp/terraform-plugin-testing/helper/acctest" "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-plugin-testing/terraform" + "github.com/joho/godotenv" "github.com/stackitcloud/stackit-sdk-go/core/utils" "github.com/stackitcloud/stackit-sdk-go/core/config" @@ -26,13 +29,31 @@ var ( resourceSecurityGroupMinConfig string //nolint:unused // needs implementation ) +func setup() { + err := godotenv.Load() + if err != nil { + slog.Info("could not find .env file - not loading .env") + return + } + slog.Info("loaded .env file") +} + +func TestMain(m *testing.M) { + setup() + code := m.Run() + // shutdown() + os.Exit(code) +} + // Instance resource data var instanceResource = map[string]string{ "project_id": testutil.ProjectId, + "region": "eu01", "name": fmt.Sprintf("tf-acc-%s", acctest.RandStringFromCharSet(7, acctest.CharSetAlphaNum)), "acl": "192.168.0.0/16", "backup_schedule": "00 16 * * *", "backup_schedule_updated": "00 12 * * *", + "retention_days": "33", "flavor_cpu": "2", "flavor_ram": "4", "flavor_description": "Small, Compute optimized", @@ -41,75 +62,96 @@ var instanceResource = map[string]string{ "storage_size": "5", "version": "14", "flavor_id": "2.4", + "kek_key_id": "UUID1", + "kek_key_ring_id": "UUID2", + "kek_key_version": "1", + "service_account": "service@account.com", + "access_scope": "SNA", } // User resource data var userResource = map[string]string{ "username": fmt.Sprintf("tfaccuser%s", acctest.RandStringFromCharSet(4, acctest.CharSetAlpha)), "role": "createdb", - "project_id": instanceResource["project_id"], + "project_id": testutil.ProjectId, } // Database resource data var databaseResource = map[string]string{ - "name": fmt.Sprintf("tfaccdb%s", acctest.RandStringFromCharSet(4, acctest.CharSetAlphaNum)), + "name": fmt.Sprintf("tfaccdb%s", acctest.RandStringFromCharSet(4, acctest.CharSetAlphaNum)), + "project_id": testutil.ProjectId, } -func configResources(backupSchedule string, region *string) string { - var regionConfig string - if region != nil { - regionConfig = fmt.Sprintf(`region = %q`, *region) - } +func configResources(backupSchedule string, _ *string) string { return fmt.Sprintf( ` %s - resource "stackit_postgresflex_instance" "instance" { - project_id = "%s" - name = "%s" - acl = ["%s"] - backup_schedule = "%s" - flavor = { - cpu = %s - ram = %s - } - replicas = %s - storage = { - class = "%s" - size = %s - } - version = "%s" - %s + + resource "stackitprivatepreview_postgresflexalpha_instance" "instance" { + project_id = "%s" + region = "%s" + name = "%s" + backup_schedule = "%s" + retention_days = %s + flavor_id = %s + replicas = %s + storage = { + performance_class = "%s" + size = %s + } + encryption = { + kek_key_id = "%s" + kek_key_ring_id = "%s" + kek_key_version = "%s" + service_account = "%s" + } + network = { + acl = ["%s"] + access_scope = "%s" + } + version = %s } - resource "stackit_postgresflex_user" "user" { - project_id = stackit_postgresflex_instance.instance.project_id - instance_id = stackit_postgresflex_instance.instance.instance_id + resource "stackitprivatepreview_postgresflexalpha_user" "user" { + project_id = "%s" + instance_id = stackitprivatepreview_postgresflexalpha_instance.instance.instance_id username = "%s" roles = ["%s"] } - resource "stackit_postgresflex_database" "database" { - project_id = stackit_postgresflex_instance.instance.project_id - instance_id = stackit_postgresflex_instance.instance.instance_id + resource "stackitprivatepreview_postgresflexalpha_database" "database" { + project_id = "%s" + instance_id = stackitprivatepreview_postgresflexalpha_instance.instance.instance_id name = "%s" - owner = stackit_postgresflex_user.user.username + owner = stackitprivatepreview_postgresflexalpha_user.user.username } `, - testutil.PostgresFlexProviderConfig(), + testutil.PostgresFlexProviderConfig( + utils.GetEnvOrDefault("TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_FILE", "~/service-account.json"), + ), instanceResource["project_id"], + instanceResource["region"], instanceResource["name"], - instanceResource["acl"], backupSchedule, - instanceResource["flavor_cpu"], - instanceResource["flavor_ram"], + instanceResource["retention_days"], + instanceResource["flavor_id"], instanceResource["replicas"], instanceResource["storage_class"], instanceResource["storage_size"], + instanceResource["kek_key_id"], + instanceResource["kek_key_ring_id"], + instanceResource["kek_key_version"], + instanceResource["service_account"], + instanceResource["acl"], + instanceResource["access_scope"], instanceResource["version"], - regionConfig, + + userResource["project_id"], userResource["username"], userResource["role"], + + databaseResource["project_id"], databaseResource["name"], ) } @@ -126,107 +168,107 @@ func TestAccPostgresFlexFlexResource(t *testing.T) { Check: resource.ComposeAggregateTestCheckFunc( // Instance resource.TestCheckResourceAttr( - "stackit_postgresflex_instance.instance", + "stackitprivatepreview_postgresflexalpha_instance.instance", "project_id", instanceResource["project_id"], ), resource.TestCheckResourceAttrSet( - "stackit_postgresflex_instance.instance", + "stackitprivatepreview_postgresflexalpha_instance.instance", "instance_id", ), resource.TestCheckResourceAttr( - "stackit_postgresflex_instance.instance", + "stackitprivatepreview_postgresflexalpha_instance.instance", "name", instanceResource["name"], ), resource.TestCheckResourceAttr( - "stackit_postgresflex_instance.instance", + "stackitprivatepreview_postgresflexalpha_instance.instance", "acl.#", "1", ), resource.TestCheckResourceAttr( - "stackit_postgresflex_instance.instance", + "stackitprivatepreview_postgresflexalpha_instance.instance", "acl.0", instanceResource["acl"], ), resource.TestCheckResourceAttrSet( - "stackit_postgresflex_instance.instance", + "stackitprivatepreview_postgresflexalpha_instance.instance", "flavor.id", ), resource.TestCheckResourceAttrSet( - "stackit_postgresflex_instance.instance", + "stackitprivatepreview_postgresflexalpha_instance.instance", "flavor.description", ), resource.TestCheckResourceAttr( - "stackit_postgresflex_instance.instance", + "stackitprivatepreview_postgresflexalpha_instance.instance", "backup_schedule", instanceResource["backup_schedule"], ), resource.TestCheckResourceAttr( - "stackit_postgresflex_instance.instance", + "stackitprivatepreview_postgresflexalpha_instance.instance", "flavor.cpu", instanceResource["flavor_cpu"], ), resource.TestCheckResourceAttr( - "stackit_postgresflex_instance.instance", + "stackitprivatepreview_postgresflexalpha_instance.instance", "flavor.ram", instanceResource["flavor_ram"], ), resource.TestCheckResourceAttr( - "stackit_postgresflex_instance.instance", + "stackitprivatepreview_postgresflexalpha_instance.instance", "replicas", instanceResource["replicas"], ), resource.TestCheckResourceAttr( - "stackit_postgresflex_instance.instance", + "stackitprivatepreview_postgresflexalpha_instance.instance", "storage.class", instanceResource["storage_class"], ), resource.TestCheckResourceAttr( - "stackit_postgresflex_instance.instance", + "stackitprivatepreview_postgresflexalpha_instance.instance", "storage.size", instanceResource["storage_size"], ), resource.TestCheckResourceAttr( - "stackit_postgresflex_instance.instance", + "stackitprivatepreview_postgresflexalpha_instance.instance", "version", instanceResource["version"], ), resource.TestCheckResourceAttr( - "stackit_postgresflex_instance.instance", + "stackitprivatepreview_postgresflexalpha_instance.instance", "region", testutil.Region, ), // User resource.TestCheckResourceAttrPair( - "stackit_postgresflex_user.user", "project_id", - "stackit_postgresflex_instance.instance", "project_id", + "stackitprivatepreview_postgresflexalpha_user.user", "project_id", + "stackitprivatepreview_postgresflexalpha_instance.instance", "project_id", ), resource.TestCheckResourceAttrPair( - "stackit_postgresflex_user.user", "instance_id", - "stackit_postgresflex_instance.instance", "instance_id", + "stackitprivatepreview_postgresflexalpha_user.user", "instance_id", + "stackitprivatepreview_postgresflexalpha_instance.instance", "instance_id", ), - resource.TestCheckResourceAttrSet("stackit_postgresflex_user.user", "user_id"), - resource.TestCheckResourceAttrSet("stackit_postgresflex_user.user", "password"), + resource.TestCheckResourceAttrSet("stackitprivatepreview_postgresflexalpha_user.user", "user_id"), + resource.TestCheckResourceAttrSet("stackitprivatepreview_postgresflexalpha_user.user", "password"), // Database resource.TestCheckResourceAttrPair( - "stackit_postgresflex_database.database", "project_id", - "stackit_postgresflex_instance.instance", "project_id", + "stackitprivatepreview_postgresflexalpha_database.database", "project_id", + "stackitprivatepreview_postgresflexalpha_instance.instance", "project_id", ), resource.TestCheckResourceAttrPair( - "stackit_postgresflex_database.database", "instance_id", - "stackit_postgresflex_instance.instance", "instance_id", + "stackitprivatepreview_postgresflexalpha_database.database", "instance_id", + "stackitprivatepreview_postgresflexalpha_instance.instance", "instance_id", ), resource.TestCheckResourceAttr( - "stackit_postgresflex_database.database", + "stackitprivatepreview_postgresflexalpha_database.database", "name", databaseResource["name"], ), resource.TestCheckResourceAttrPair( - "stackit_postgresflex_database.database", "owner", - "stackit_postgresflex_user.user", "username", + "stackitprivatepreview_postgresflexalpha_database.database", "owner", + "stackitprivatepreview_postgresflexalpha_user.user", "username", ), ), }, @@ -236,21 +278,21 @@ func TestAccPostgresFlexFlexResource(t *testing.T) { ` %s - data "stackit_postgresflex_instance" "instance" { - project_id = stackit_postgresflex_instance.instance.project_id - instance_id = stackit_postgresflex_instance.instance.instance_id + data "stackitprivatepreview_postgresflexalpha_instance" "instance" { + project_id = stackitprivatepreview_postgresflexalpha_instance.instance.project_id + instance_id = stackitprivatepreview_postgresflexalpha_instance.instance.instance_id } - data "stackit_postgresflex_user" "user" { - project_id = stackit_postgresflex_instance.instance.project_id - instance_id = stackit_postgresflex_instance.instance.instance_id - user_id = stackit_postgresflex_user.user.user_id + data "stackitprivatepreview_postgresflexalpha_user" "user" { + project_id = stackitprivatepreview_postgresflexalpha_instance.instance.project_id + instance_id = stackitprivatepreview_postgresflexalpha_instance.instance.instance_id + user_id = stackitprivatepreview_postgresflexalpha_user.user.user_id } - data "stackit_postgresflex_database" "database" { - project_id = stackit_postgresflex_instance.instance.project_id - instance_id = stackit_postgresflex_instance.instance.instance_id - database_id = stackit_postgresflex_database.database.database_id + data "stackitprivatepreview_postgresflexalpha_database" "database" { + project_id = stackitprivatepreview_postgresflexalpha_instance.instance.project_id + instance_id = stackitprivatepreview_postgresflexalpha_instance.instance.instance_id + database_id = stackitprivatepreview_postgresflexalpha_database.database.database_id } `, configResources(instanceResource["backup_schedule"], nil), @@ -258,135 +300,135 @@ func TestAccPostgresFlexFlexResource(t *testing.T) { Check: resource.ComposeAggregateTestCheckFunc( // Instance data resource.TestCheckResourceAttr( - "data.stackit_postgresflex_instance.instance", + "data.stackitprivatepreview_postgresflexalpha_instance.instance", "project_id", instanceResource["project_id"], ), resource.TestCheckResourceAttr( - "data.stackit_postgresflex_instance.instance", + "data.stackitprivatepreview_postgresflexalpha_instance.instance", "name", instanceResource["name"], ), resource.TestCheckResourceAttrPair( - "data.stackit_postgresflex_instance.instance", "project_id", - "stackit_postgresflex_instance.instance", "project_id", + "data.stackitprivatepreview_postgresflexalpha_instance.instance", "project_id", + "stackitprivatepreview_postgresflexalpha_instance.instance", "project_id", ), resource.TestCheckResourceAttrPair( - "data.stackit_postgresflex_instance.instance", "instance_id", - "stackit_postgresflex_instance.instance", "instance_id", + "data.stackitprivatepreview_postgresflexalpha_instance.instance", "instance_id", + "stackitprivatepreview_postgresflexalpha_instance.instance", "instance_id", ), resource.TestCheckResourceAttrPair( - "data.stackit_postgresflex_user.user", "instance_id", - "stackit_postgresflex_user.user", "instance_id", + "data.stackitprivatepreview_postgresflexalpha_user.user", "instance_id", + "stackitprivatepreview_postgresflexalpha_user.user", "instance_id", ), resource.TestCheckResourceAttr( - "data.stackit_postgresflex_instance.instance", + "data.stackitprivatepreview_postgresflexalpha_instance.instance", "acl.#", "1", ), resource.TestCheckResourceAttr( - "data.stackit_postgresflex_instance.instance", + "data.stackitprivatepreview_postgresflexalpha_instance.instance", "acl.0", instanceResource["acl"], ), resource.TestCheckResourceAttr( - "data.stackit_postgresflex_instance.instance", + "data.stackitprivatepreview_postgresflexalpha_instance.instance", "backup_schedule", instanceResource["backup_schedule"], ), resource.TestCheckResourceAttr( - "data.stackit_postgresflex_instance.instance", + "data.stackitprivatepreview_postgresflexalpha_instance.instance", "flavor.id", instanceResource["flavor_id"], ), resource.TestCheckResourceAttr( - "data.stackit_postgresflex_instance.instance", + "data.stackitprivatepreview_postgresflexalpha_instance.instance", "flavor.description", instanceResource["flavor_description"], ), resource.TestCheckResourceAttr( - "data.stackit_postgresflex_instance.instance", + "data.stackitprivatepreview_postgresflexalpha_instance.instance", "flavor.cpu", instanceResource["flavor_cpu"], ), resource.TestCheckResourceAttr( - "data.stackit_postgresflex_instance.instance", + "data.stackitprivatepreview_postgresflexalpha_instance.instance", "flavor.ram", instanceResource["flavor_ram"], ), resource.TestCheckResourceAttr( - "data.stackit_postgresflex_instance.instance", + "data.stackitprivatepreview_postgresflexalpha_instance.instance", "replicas", instanceResource["replicas"], ), // User data resource.TestCheckResourceAttr( - "data.stackit_postgresflex_user.user", + "data.stackitprivatepreview_postgresflexalpha_user.user", "project_id", userResource["project_id"], ), resource.TestCheckResourceAttrSet( - "data.stackit_postgresflex_user.user", + "data.stackitprivatepreview_postgresflexalpha_user.user", "user_id", ), resource.TestCheckResourceAttr( - "data.stackit_postgresflex_user.user", + "data.stackitprivatepreview_postgresflexalpha_user.user", "username", userResource["username"], ), resource.TestCheckResourceAttr( - "data.stackit_postgresflex_user.user", + "data.stackitprivatepreview_postgresflexalpha_user.user", "roles.#", "1", ), resource.TestCheckResourceAttr( - "data.stackit_postgresflex_user.user", + "data.stackitprivatepreview_postgresflexalpha_user.user", "roles.0", userResource["role"], ), resource.TestCheckResourceAttrSet( - "data.stackit_postgresflex_user.user", + "data.stackitprivatepreview_postgresflexalpha_user.user", "host", ), resource.TestCheckResourceAttrSet( - "data.stackit_postgresflex_user.user", + "data.stackitprivatepreview_postgresflexalpha_user.user", "port", ), // Database data resource.TestCheckResourceAttr( - "data.stackit_postgresflex_database.database", + "data.stackitprivatepreview_postgresflexalpha_database.database", "project_id", instanceResource["project_id"], ), resource.TestCheckResourceAttr( - "data.stackit_postgresflex_database.database", + "data.stackitprivatepreview_postgresflexalpha_database.database", "name", databaseResource["name"], ), resource.TestCheckResourceAttrPair( - "data.stackit_postgresflex_database.database", + "data.stackitprivatepreview_postgresflexalpha_database.database", "instance_id", - "stackit_postgresflex_instance.instance", + "stackitprivatepreview_postgresflexalpha_instance.instance", "instance_id", ), resource.TestCheckResourceAttrPair( - "data.stackit_postgresflex_database.database", + "data.stackitprivatepreview_postgresflexalpha_database.database", "owner", - "data.stackit_postgresflex_user.user", + "data.stackitprivatepreview_postgresflexalpha_user.user", "username", ), ), }, // Import { - ResourceName: "stackit_postgresflex_instance.instance", + ResourceName: "stackitprivatepreview_postgresflexalpha_instance.instance", ImportStateIdFunc: func(s *terraform.State) (string, error) { - r, ok := s.RootModule().Resources["stackit_postgresflex_instance.instance"] + r, ok := s.RootModule().Resources["stackitprivatepreview_postgresflexalpha_instance.instance"] if !ok { - return "", fmt.Errorf("couldn't find resource stackit_postgresflex_instance.instance") + return "", fmt.Errorf("couldn't find resource stackitprivatepreview_postgresflexalpha_instance.instance") } instanceId, ok := r.Primary.Attributes["instance_id"] if !ok { @@ -400,11 +442,11 @@ func TestAccPostgresFlexFlexResource(t *testing.T) { ImportStateVerifyIgnore: []string{"password"}, }, { - ResourceName: "stackit_postgresflex_user.user", + ResourceName: "stackitprivatepreview_postgresflexalpha_user.user", ImportStateIdFunc: func(s *terraform.State) (string, error) { - r, ok := s.RootModule().Resources["stackit_postgresflex_user.user"] + r, ok := s.RootModule().Resources["stackitprivatepreview_postgresflexalpha_user.user"] if !ok { - return "", fmt.Errorf("couldn't find resource stackit_postgresflex_user.user") + return "", fmt.Errorf("couldn't find resource stackitprivatepreview_postgresflexalpha_user.user") } instanceId, ok := r.Primary.Attributes["instance_id"] if !ok { @@ -422,11 +464,11 @@ func TestAccPostgresFlexFlexResource(t *testing.T) { ImportStateVerifyIgnore: []string{"password", "uri"}, }, { - ResourceName: "stackit_postgresflex_database.database", + ResourceName: "stackitprivatepreview_postgresflexalpha_database.database", ImportStateIdFunc: func(s *terraform.State) (string, error) { - r, ok := s.RootModule().Resources["stackit_postgresflex_database.database"] + r, ok := s.RootModule().Resources["stackitprivatepreview_postgresflexalpha_database.database"] if !ok { - return "", fmt.Errorf("couldn't find resource stackit_postgresflex_database.database") + return "", fmt.Errorf("couldn't find resource stackitprivatepreview_postgresflexalpha_database.database") } instanceId, ok := r.Primary.Attributes["instance_id"] if !ok { @@ -454,69 +496,69 @@ func TestAccPostgresFlexFlexResource(t *testing.T) { Check: resource.ComposeAggregateTestCheckFunc( // Instance data resource.TestCheckResourceAttr( - "stackit_postgresflex_instance.instance", + "stackitprivatepreview_postgresflexalpha_instance.instance", "project_id", instanceResource["project_id"], ), resource.TestCheckResourceAttrSet( - "stackit_postgresflex_instance.instance", + "stackitprivatepreview_postgresflexalpha_instance.instance", "instance_id", ), resource.TestCheckResourceAttr( - "stackit_postgresflex_instance.instance", + "stackitprivatepreview_postgresflexalpha_instance.instance", "name", instanceResource["name"], ), resource.TestCheckResourceAttr( - "stackit_postgresflex_instance.instance", + "stackitprivatepreview_postgresflexalpha_instance.instance", "acl.#", "1", ), resource.TestCheckResourceAttr( - "stackit_postgresflex_instance.instance", + "stackitprivatepreview_postgresflexalpha_instance.instance", "acl.0", instanceResource["acl"], ), resource.TestCheckResourceAttr( - "stackit_postgresflex_instance.instance", + "stackitprivatepreview_postgresflexalpha_instance.instance", "backup_schedule", instanceResource["backup_schedule_updated"], ), resource.TestCheckResourceAttrSet( - "stackit_postgresflex_instance.instance", + "stackitprivatepreview_postgresflexalpha_instance.instance", "flavor.id", ), resource.TestCheckResourceAttrSet( - "stackit_postgresflex_instance.instance", + "stackitprivatepreview_postgresflexalpha_instance.instance", "flavor.description", ), resource.TestCheckResourceAttr( - "stackit_postgresflex_instance.instance", + "stackitprivatepreview_postgresflexalpha_instance.instance", "flavor.cpu", instanceResource["flavor_cpu"], ), resource.TestCheckResourceAttr( - "stackit_postgresflex_instance.instance", + "stackitprivatepreview_postgresflexalpha_instance.instance", "flavor.ram", instanceResource["flavor_ram"], ), resource.TestCheckResourceAttr( - "stackit_postgresflex_instance.instance", + "stackitprivatepreview_postgresflexalpha_instance.instance", "replicas", instanceResource["replicas"], ), resource.TestCheckResourceAttr( - "stackit_postgresflex_instance.instance", + "stackitprivatepreview_postgresflexalpha_instance.instance", "storage.class", instanceResource["storage_class"], ), resource.TestCheckResourceAttr( - "stackit_postgresflex_instance.instance", + "stackitprivatepreview_postgresflexalpha_instance.instance", "storage.size", instanceResource["storage_size"], ), resource.TestCheckResourceAttr( - "stackit_postgresflex_instance.instance", + "stackitprivatepreview_postgresflexalpha_instance.instance", "version", instanceResource["version"], ), @@ -545,7 +587,7 @@ func testAccCheckPostgresFlexDestroy(s *terraform.State) error { instancesToDestroy := []string{} for _, rs := range s.RootModule().Resources { - if rs.Type != "stackit_postgresflex_instance" { + if rs.Type != "stackitprivatepreview_postgresflexalpha_instance" { continue } // instance terraform ID: = "[project_id],[region],[instance_id]" diff --git a/stackit/internal/services/postgresflexalpha/testdata/resource-complete.tf b/stackit/internal/services/postgresflexalpha/testdata/resource-complete.tf index 8b137891..7a708880 100644 --- a/stackit/internal/services/postgresflexalpha/testdata/resource-complete.tf +++ b/stackit/internal/services/postgresflexalpha/testdata/resource-complete.tf @@ -1 +1,24 @@ - +resource "stackitprivatepreview_postgresflexalpha_instance" "msh-instance-only" { + project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" + name = "example-instance" + acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"] + backup_schedule = "0 0 * * *" + retention_days = 30 + flavor_id = "flavor.id" + replicas = 1 + storage = { + performance_class = "premium-perf2-stackit" + size = 10 + } + encryption = { + kek_key_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" + kek_key_ring_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" + kek_key_version = 1 + service_account = "service@account.email" + } + network = { + acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"] + access_scope = "PUBLIC" + } + version = 17 +} diff --git a/stackit/internal/services/sqlserverflexbeta/database/datasource.go b/stackit/internal/services/sqlserverflexbeta/database/datasource.go index 4cc56ea2..bb6c3038 100644 --- a/stackit/internal/services/sqlserverflexbeta/database/datasource.go +++ b/stackit/internal/services/sqlserverflexbeta/database/datasource.go @@ -146,15 +146,11 @@ func (d *databaseDataSource) Read(ctx context.Context, req datasource.ReadReques data.Id = types.Int64Value(dbId) data.TfId = utils.BuildInternalTerraformId(projectId, region, instanceId, databaseName) data.Owner = types.StringValue(databaseResp.GetOwner()) - - // TODO: fill remaining fields - // data.CollationName = types.Sometype(apiResponse.GetCollationName()) - // data.CompatibilityLevel = types.Sometype(apiResponse.GetCompatibilityLevel()) - // data.DatabaseName = types.Sometype(apiResponse.GetDatabaseName()) - // data.Id = types.Sometype(apiResponse.GetId()) - // data.InstanceId = types.Sometype(apiResponse.GetInstanceId()) + data.CollationName = types.StringValue(databaseResp.GetCollationName()) + data.CompatibilityLevel = types.Int64Value(databaseResp.GetCompatibilityLevel()) + data.DatabaseName = types.StringValue(databaseResp.GetName()) + // data.InstanceId = types.StringValue(databaseResp.GetInstanceId()) // data.Name = types.Sometype(apiResponse.GetName()) - // data.Owner = types.Sometype(apiResponse.GetOwner()) // data.ProjectId = types.Sometype(apiResponse.GetProjectId()) // data.Region = types.Sometype(apiResponse.GetRegion()) diff --git a/stackit/internal/services/sqlserverflexbeta/flavors/datasource.go b/stackit/internal/services/sqlserverflexbeta/flavors/datasource.go deleted file mode 100644 index 41b1aad8..00000000 --- a/stackit/internal/services/sqlserverflexbeta/flavors/datasource.go +++ /dev/null @@ -1,118 +0,0 @@ -package sqlserverflexbeta - -import ( - "context" - "fmt" - "net/http" - - "github.com/hashicorp/terraform-plugin-framework/datasource" - "github.com/hashicorp/terraform-plugin-log/tflog" - "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" - "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" - "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" - - sqlserverflexbetaPkg "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta" - - sqlserverflexbetaUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/utils" - - sqlserverflexbetaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/flavors/datasources_gen" -) - -var _ datasource.DataSource = (*flavorsDataSource)(nil) - -const errorPrefix = "[Sqlserverflexbeta - Flavors]" - -func NewFlavorsDataSource() datasource.DataSource { - return &flavorsDataSource{} -} - -type flavorsDataSource struct { - client *sqlserverflexbetaPkg.APIClient - providerData core.ProviderData -} - -func (d *flavorsDataSource) Metadata(_ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) { - resp.TypeName = req.ProviderTypeName + "_sqlserverflexbeta_flavors" -} - -func (d *flavorsDataSource) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) { - resp.Schema = sqlserverflexbetaGen.FlavorsDataSourceSchema(ctx) -} - -// Configure adds the provider configured client to the data source. -func (d *flavorsDataSource) Configure( - ctx context.Context, - req datasource.ConfigureRequest, - resp *datasource.ConfigureResponse, -) { - var ok bool - d.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics) - if !ok { - return - } - - apiClient := sqlserverflexbetaUtils.ConfigureClient(ctx, &d.providerData, &resp.Diagnostics) - if resp.Diagnostics.HasError() { - return - } - d.client = apiClient - tflog.Info(ctx, fmt.Sprintf("%s client configured", errorPrefix)) -} - -func (d *flavorsDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { - var data sqlserverflexbetaGen.FlavorsModel - - // Read Terraform configuration data into the model - resp.Diagnostics.Append(req.Config.Get(ctx, &data)...) - - if resp.Diagnostics.HasError() { - return - } - - ctx = core.InitProviderContext(ctx) - - projectId := data.ProjectId.ValueString() - region := d.providerData.GetRegionWithOverride(data.Region) - flavorsId := data.FlavorsId.ValueString() - - ctx = tflog.SetField(ctx, "project_id", projectId) - ctx = tflog.SetField(ctx, "region", region) - ctx = tflog.SetField(ctx, "flavors_id", flavorsId) - - flavorsResp, err := d.client.GetFlavorsRequest(ctx, projectId, region, flavorsId).Execute() - if err != nil { - utils.LogError( - ctx, - &resp.Diagnostics, - err, - "Reading flavors", - fmt.Sprintf("flavors with ID %q does not exist in project %q.", flavorsId, projectId), - map[int]string{ - http.StatusForbidden: fmt.Sprintf("Project with ID %q not found or forbidden access", projectId), - }, - ) - resp.State.RemoveResource(ctx) - return - } - - ctx = core.LogResponse(ctx) - - // Todo: Read API call logic - - // Example data value setting - // data.Id = types.StringValue("example-id") - - err = mapResponseToModel(ctx, flavorsResp, &data, resp.Diagnostics) - if err != nil { - core.LogAndAddError( - ctx, - &resp.Diagnostics, - fmt.Sprintf("%s Read", errorPrefix), - fmt.Sprintf("Processing API payload: %v", err), - ) - return - } - - // Save data into Terraform state - resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) -} diff --git a/stackit/internal/services/sqlserverflexbeta/flavors/datasources_gen/flavors_data_source_gen.go b/stackit/internal/services/sqlserverflexbeta/flavors/datasources_gen/flavors_data_source_gen.go deleted file mode 100644 index 94b526be..00000000 --- a/stackit/internal/services/sqlserverflexbeta/flavors/datasources_gen/flavors_data_source_gen.go +++ /dev/null @@ -1,1909 +0,0 @@ -// Code generated by terraform-plugin-framework-generator DO NOT EDIT. - -package sqlserverflexbeta - -import ( - "context" - "fmt" - "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator" - "github.com/hashicorp/terraform-plugin-framework/attr" - "github.com/hashicorp/terraform-plugin-framework/diag" - "github.com/hashicorp/terraform-plugin-framework/schema/validator" - "github.com/hashicorp/terraform-plugin-framework/types" - "github.com/hashicorp/terraform-plugin-framework/types/basetypes" - "github.com/hashicorp/terraform-plugin-go/tftypes" - "strings" - - "github.com/hashicorp/terraform-plugin-framework/datasource/schema" -) - -func FlavorsDataSourceSchema(ctx context.Context) schema.Schema { - return schema.Schema{ - Attributes: map[string]schema.Attribute{ - "flavors": schema.ListNestedAttribute{ - NestedObject: schema.NestedAttributeObject{ - Attributes: map[string]schema.Attribute{ - "cpu": schema.Int64Attribute{ - Computed: true, - Description: "The cpu count of the instance.", - MarkdownDescription: "The cpu count of the instance.", - }, - "description": schema.StringAttribute{ - Computed: true, - Description: "The flavor description.", - MarkdownDescription: "The flavor description.", - }, - "id": schema.StringAttribute{ - Computed: true, - Description: "The id of the instance flavor.", - MarkdownDescription: "The id of the instance flavor.", - }, - "max_gb": schema.Int64Attribute{ - Computed: true, - Description: "maximum storage which can be ordered for the flavor in Gigabyte.", - MarkdownDescription: "maximum storage which can be ordered for the flavor in Gigabyte.", - }, - "memory": schema.Int64Attribute{ - Computed: true, - Description: "The memory of the instance in Gibibyte.", - MarkdownDescription: "The memory of the instance in Gibibyte.", - }, - "min_gb": schema.Int64Attribute{ - Computed: true, - Description: "minimum storage which is required to order in Gigabyte.", - MarkdownDescription: "minimum storage which is required to order in Gigabyte.", - }, - "node_type": schema.StringAttribute{ - Computed: true, - Description: "defines the nodeType it can be either single or HA", - MarkdownDescription: "defines the nodeType it can be either single or HA", - }, - "storage_classes": schema.ListNestedAttribute{ - NestedObject: schema.NestedAttributeObject{ - Attributes: map[string]schema.Attribute{ - "class": schema.StringAttribute{ - Computed: true, - }, - "max_io_per_sec": schema.Int64Attribute{ - Computed: true, - }, - "max_through_in_mb": schema.Int64Attribute{ - Computed: true, - }, - }, - CustomType: StorageClassesType{ - ObjectType: types.ObjectType{ - AttrTypes: StorageClassesValue{}.AttributeTypes(ctx), - }, - }, - }, - Computed: true, - Description: "maximum storage which can be ordered for the flavor in Gigabyte.", - MarkdownDescription: "maximum storage which can be ordered for the flavor in Gigabyte.", - }, - }, - CustomType: FlavorsType{ - ObjectType: types.ObjectType{ - AttrTypes: FlavorsValue{}.AttributeTypes(ctx), - }, - }, - }, - Computed: true, - Description: "List of flavors available for the project.", - MarkdownDescription: "List of flavors available for the project.", - }, - "page": schema.Int64Attribute{ - Optional: true, - Computed: true, - Description: "Number of the page of items list to be returned.", - MarkdownDescription: "Number of the page of items list to be returned.", - }, - "pagination": schema.SingleNestedAttribute{ - Attributes: map[string]schema.Attribute{ - "page": schema.Int64Attribute{ - Computed: true, - }, - "size": schema.Int64Attribute{ - Computed: true, - }, - "sort": schema.StringAttribute{ - Computed: true, - }, - "total_pages": schema.Int64Attribute{ - Computed: true, - }, - "total_rows": schema.Int64Attribute{ - Computed: true, - }, - }, - CustomType: PaginationType{ - ObjectType: types.ObjectType{ - AttrTypes: PaginationValue{}.AttributeTypes(ctx), - }, - }, - Computed: true, - }, - "project_id": schema.StringAttribute{ - Required: true, - Description: "The STACKIT project ID.", - MarkdownDescription: "The STACKIT project ID.", - }, - "region": schema.StringAttribute{ - Required: true, - Description: "The region which should be addressed", - MarkdownDescription: "The region which should be addressed", - Validators: []validator.String{ - stringvalidator.OneOf( - "eu01", - ), - }, - }, - "size": schema.Int64Attribute{ - Optional: true, - Computed: true, - Description: "Number of items to be returned on each page.", - MarkdownDescription: "Number of items to be returned on each page.", - }, - "sort": schema.StringAttribute{ - Optional: true, - Computed: true, - Description: "Sorting of the flavors to be returned on each page.", - MarkdownDescription: "Sorting of the flavors to be returned on each page.", - Validators: []validator.String{ - stringvalidator.OneOf( - "index.desc", - "index.asc", - "cpu.desc", - "cpu.asc", - "flavor_description.asc", - "flavor_description.desc", - "id.desc", - "id.asc", - "size_max.desc", - "size_max.asc", - "ram.desc", - "ram.asc", - "size_min.desc", - "size_min.asc", - "storage_class.asc", - "storage_class.desc", - "node_type.asc", - "node_type.desc", - ), - }, - }, - }, - } -} - -type FlavorsModel struct { - Flavors types.List `tfsdk:"flavors"` - Page types.Int64 `tfsdk:"page"` - Pagination PaginationValue `tfsdk:"pagination"` - ProjectId types.String `tfsdk:"project_id"` - Region types.String `tfsdk:"region"` - Size types.Int64 `tfsdk:"size"` - Sort types.String `tfsdk:"sort"` -} - -var _ basetypes.ObjectTypable = FlavorsType{} - -type FlavorsType struct { - basetypes.ObjectType -} - -func (t FlavorsType) Equal(o attr.Type) bool { - other, ok := o.(FlavorsType) - - if !ok { - return false - } - - return t.ObjectType.Equal(other.ObjectType) -} - -func (t FlavorsType) String() string { - return "FlavorsType" -} - -func (t FlavorsType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) { - var diags diag.Diagnostics - - attributes := in.Attributes() - - cpuAttribute, ok := attributes["cpu"] - - if !ok { - diags.AddError( - "Attribute Missing", - `cpu is missing from object`) - - return nil, diags - } - - cpuVal, ok := cpuAttribute.(basetypes.Int64Value) - - if !ok { - diags.AddError( - "Attribute Wrong Type", - fmt.Sprintf(`cpu expected to be basetypes.Int64Value, was: %T`, cpuAttribute)) - } - - descriptionAttribute, ok := attributes["description"] - - if !ok { - diags.AddError( - "Attribute Missing", - `description is missing from object`) - - return nil, diags - } - - descriptionVal, ok := descriptionAttribute.(basetypes.StringValue) - - if !ok { - diags.AddError( - "Attribute Wrong Type", - fmt.Sprintf(`description expected to be basetypes.StringValue, was: %T`, descriptionAttribute)) - } - - idAttribute, ok := attributes["id"] - - if !ok { - diags.AddError( - "Attribute Missing", - `id is missing from object`) - - return nil, diags - } - - idVal, ok := idAttribute.(basetypes.StringValue) - - if !ok { - diags.AddError( - "Attribute Wrong Type", - fmt.Sprintf(`id expected to be basetypes.StringValue, was: %T`, idAttribute)) - } - - maxGbAttribute, ok := attributes["max_gb"] - - if !ok { - diags.AddError( - "Attribute Missing", - `max_gb is missing from object`) - - return nil, diags - } - - maxGbVal, ok := maxGbAttribute.(basetypes.Int64Value) - - if !ok { - diags.AddError( - "Attribute Wrong Type", - fmt.Sprintf(`max_gb expected to be basetypes.Int64Value, was: %T`, maxGbAttribute)) - } - - memoryAttribute, ok := attributes["memory"] - - if !ok { - diags.AddError( - "Attribute Missing", - `memory is missing from object`) - - return nil, diags - } - - memoryVal, ok := memoryAttribute.(basetypes.Int64Value) - - if !ok { - diags.AddError( - "Attribute Wrong Type", - fmt.Sprintf(`memory expected to be basetypes.Int64Value, was: %T`, memoryAttribute)) - } - - minGbAttribute, ok := attributes["min_gb"] - - if !ok { - diags.AddError( - "Attribute Missing", - `min_gb is missing from object`) - - return nil, diags - } - - minGbVal, ok := minGbAttribute.(basetypes.Int64Value) - - if !ok { - diags.AddError( - "Attribute Wrong Type", - fmt.Sprintf(`min_gb expected to be basetypes.Int64Value, was: %T`, minGbAttribute)) - } - - nodeTypeAttribute, ok := attributes["node_type"] - - if !ok { - diags.AddError( - "Attribute Missing", - `node_type is missing from object`) - - return nil, diags - } - - nodeTypeVal, ok := nodeTypeAttribute.(basetypes.StringValue) - - if !ok { - diags.AddError( - "Attribute Wrong Type", - fmt.Sprintf(`node_type expected to be basetypes.StringValue, was: %T`, nodeTypeAttribute)) - } - - storageClassesAttribute, ok := attributes["storage_classes"] - - if !ok { - diags.AddError( - "Attribute Missing", - `storage_classes is missing from object`) - - return nil, diags - } - - storageClassesVal, ok := storageClassesAttribute.(basetypes.ListValue) - - if !ok { - diags.AddError( - "Attribute Wrong Type", - fmt.Sprintf(`storage_classes expected to be basetypes.ListValue, was: %T`, storageClassesAttribute)) - } - - if diags.HasError() { - return nil, diags - } - - return FlavorsValue{ - Cpu: cpuVal, - Description: descriptionVal, - Id: idVal, - MaxGb: maxGbVal, - Memory: memoryVal, - MinGb: minGbVal, - NodeType: nodeTypeVal, - StorageClasses: storageClassesVal, - state: attr.ValueStateKnown, - }, diags -} - -func NewFlavorsValueNull() FlavorsValue { - return FlavorsValue{ - state: attr.ValueStateNull, - } -} - -func NewFlavorsValueUnknown() FlavorsValue { - return FlavorsValue{ - state: attr.ValueStateUnknown, - } -} - -func NewFlavorsValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (FlavorsValue, diag.Diagnostics) { - var diags diag.Diagnostics - - // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521 - ctx := context.Background() - - for name, attributeType := range attributeTypes { - attribute, ok := attributes[name] - - if !ok { - diags.AddError( - "Missing FlavorsValue Attribute Value", - "While creating a FlavorsValue value, a missing attribute value was detected. "+ - "A FlavorsValue must contain values for all attributes, even if null or unknown. "+ - "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ - fmt.Sprintf("FlavorsValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()), - ) - - continue - } - - if !attributeType.Equal(attribute.Type(ctx)) { - diags.AddError( - "Invalid FlavorsValue Attribute Type", - "While creating a FlavorsValue value, an invalid attribute value was detected. "+ - "A FlavorsValue must use a matching attribute type for the value. "+ - "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ - fmt.Sprintf("FlavorsValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+ - fmt.Sprintf("FlavorsValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)), - ) - } - } - - for name := range attributes { - _, ok := attributeTypes[name] - - if !ok { - diags.AddError( - "Extra FlavorsValue Attribute Value", - "While creating a FlavorsValue value, an extra attribute value was detected. "+ - "A FlavorsValue must not contain values beyond the expected attribute types. "+ - "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ - fmt.Sprintf("Extra FlavorsValue Attribute Name: %s", name), - ) - } - } - - if diags.HasError() { - return NewFlavorsValueUnknown(), diags - } - - cpuAttribute, ok := attributes["cpu"] - - if !ok { - diags.AddError( - "Attribute Missing", - `cpu is missing from object`) - - return NewFlavorsValueUnknown(), diags - } - - cpuVal, ok := cpuAttribute.(basetypes.Int64Value) - - if !ok { - diags.AddError( - "Attribute Wrong Type", - fmt.Sprintf(`cpu expected to be basetypes.Int64Value, was: %T`, cpuAttribute)) - } - - descriptionAttribute, ok := attributes["description"] - - if !ok { - diags.AddError( - "Attribute Missing", - `description is missing from object`) - - return NewFlavorsValueUnknown(), diags - } - - descriptionVal, ok := descriptionAttribute.(basetypes.StringValue) - - if !ok { - diags.AddError( - "Attribute Wrong Type", - fmt.Sprintf(`description expected to be basetypes.StringValue, was: %T`, descriptionAttribute)) - } - - idAttribute, ok := attributes["id"] - - if !ok { - diags.AddError( - "Attribute Missing", - `id is missing from object`) - - return NewFlavorsValueUnknown(), diags - } - - idVal, ok := idAttribute.(basetypes.StringValue) - - if !ok { - diags.AddError( - "Attribute Wrong Type", - fmt.Sprintf(`id expected to be basetypes.StringValue, was: %T`, idAttribute)) - } - - maxGbAttribute, ok := attributes["max_gb"] - - if !ok { - diags.AddError( - "Attribute Missing", - `max_gb is missing from object`) - - return NewFlavorsValueUnknown(), diags - } - - maxGbVal, ok := maxGbAttribute.(basetypes.Int64Value) - - if !ok { - diags.AddError( - "Attribute Wrong Type", - fmt.Sprintf(`max_gb expected to be basetypes.Int64Value, was: %T`, maxGbAttribute)) - } - - memoryAttribute, ok := attributes["memory"] - - if !ok { - diags.AddError( - "Attribute Missing", - `memory is missing from object`) - - return NewFlavorsValueUnknown(), diags - } - - memoryVal, ok := memoryAttribute.(basetypes.Int64Value) - - if !ok { - diags.AddError( - "Attribute Wrong Type", - fmt.Sprintf(`memory expected to be basetypes.Int64Value, was: %T`, memoryAttribute)) - } - - minGbAttribute, ok := attributes["min_gb"] - - if !ok { - diags.AddError( - "Attribute Missing", - `min_gb is missing from object`) - - return NewFlavorsValueUnknown(), diags - } - - minGbVal, ok := minGbAttribute.(basetypes.Int64Value) - - if !ok { - diags.AddError( - "Attribute Wrong Type", - fmt.Sprintf(`min_gb expected to be basetypes.Int64Value, was: %T`, minGbAttribute)) - } - - nodeTypeAttribute, ok := attributes["node_type"] - - if !ok { - diags.AddError( - "Attribute Missing", - `node_type is missing from object`) - - return NewFlavorsValueUnknown(), diags - } - - nodeTypeVal, ok := nodeTypeAttribute.(basetypes.StringValue) - - if !ok { - diags.AddError( - "Attribute Wrong Type", - fmt.Sprintf(`node_type expected to be basetypes.StringValue, was: %T`, nodeTypeAttribute)) - } - - storageClassesAttribute, ok := attributes["storage_classes"] - - if !ok { - diags.AddError( - "Attribute Missing", - `storage_classes is missing from object`) - - return NewFlavorsValueUnknown(), diags - } - - storageClassesVal, ok := storageClassesAttribute.(basetypes.ListValue) - - if !ok { - diags.AddError( - "Attribute Wrong Type", - fmt.Sprintf(`storage_classes expected to be basetypes.ListValue, was: %T`, storageClassesAttribute)) - } - - if diags.HasError() { - return NewFlavorsValueUnknown(), diags - } - - return FlavorsValue{ - Cpu: cpuVal, - Description: descriptionVal, - Id: idVal, - MaxGb: maxGbVal, - Memory: memoryVal, - MinGb: minGbVal, - NodeType: nodeTypeVal, - StorageClasses: storageClassesVal, - state: attr.ValueStateKnown, - }, diags -} - -func NewFlavorsValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) FlavorsValue { - object, diags := NewFlavorsValue(attributeTypes, attributes) - - if diags.HasError() { - // This could potentially be added to the diag package. - diagsStrings := make([]string, 0, len(diags)) - - for _, diagnostic := range diags { - diagsStrings = append(diagsStrings, fmt.Sprintf( - "%s | %s | %s", - diagnostic.Severity(), - diagnostic.Summary(), - diagnostic.Detail())) - } - - panic("NewFlavorsValueMust received error(s): " + strings.Join(diagsStrings, "\n")) - } - - return object -} - -func (t FlavorsType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) { - if in.Type() == nil { - return NewFlavorsValueNull(), nil - } - - if !in.Type().Equal(t.TerraformType(ctx)) { - return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type()) - } - - if !in.IsKnown() { - return NewFlavorsValueUnknown(), nil - } - - if in.IsNull() { - return NewFlavorsValueNull(), nil - } - - attributes := map[string]attr.Value{} - - val := map[string]tftypes.Value{} - - err := in.As(&val) - - if err != nil { - return nil, err - } - - for k, v := range val { - a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v) - - if err != nil { - return nil, err - } - - attributes[k] = a - } - - return NewFlavorsValueMust(FlavorsValue{}.AttributeTypes(ctx), attributes), nil -} - -func (t FlavorsType) ValueType(ctx context.Context) attr.Value { - return FlavorsValue{} -} - -var _ basetypes.ObjectValuable = FlavorsValue{} - -type FlavorsValue struct { - Cpu basetypes.Int64Value `tfsdk:"cpu"` - Description basetypes.StringValue `tfsdk:"description"` - Id basetypes.StringValue `tfsdk:"id"` - MaxGb basetypes.Int64Value `tfsdk:"max_gb"` - Memory basetypes.Int64Value `tfsdk:"memory"` - MinGb basetypes.Int64Value `tfsdk:"min_gb"` - NodeType basetypes.StringValue `tfsdk:"node_type"` - StorageClasses basetypes.ListValue `tfsdk:"storage_classes"` - state attr.ValueState -} - -func (v FlavorsValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) { - attrTypes := make(map[string]tftypes.Type, 8) - - var val tftypes.Value - var err error - - attrTypes["cpu"] = basetypes.Int64Type{}.TerraformType(ctx) - attrTypes["description"] = basetypes.StringType{}.TerraformType(ctx) - attrTypes["id"] = basetypes.StringType{}.TerraformType(ctx) - attrTypes["max_gb"] = basetypes.Int64Type{}.TerraformType(ctx) - attrTypes["memory"] = basetypes.Int64Type{}.TerraformType(ctx) - attrTypes["min_gb"] = basetypes.Int64Type{}.TerraformType(ctx) - attrTypes["node_type"] = basetypes.StringType{}.TerraformType(ctx) - attrTypes["storage_classes"] = basetypes.ListType{ - ElemType: StorageClassesValue{}.Type(ctx), - }.TerraformType(ctx) - - objectType := tftypes.Object{AttributeTypes: attrTypes} - - switch v.state { - case attr.ValueStateKnown: - vals := make(map[string]tftypes.Value, 8) - - val, err = v.Cpu.ToTerraformValue(ctx) - - if err != nil { - return tftypes.NewValue(objectType, tftypes.UnknownValue), err - } - - vals["cpu"] = val - - val, err = v.Description.ToTerraformValue(ctx) - - if err != nil { - return tftypes.NewValue(objectType, tftypes.UnknownValue), err - } - - vals["description"] = val - - val, err = v.Id.ToTerraformValue(ctx) - - if err != nil { - return tftypes.NewValue(objectType, tftypes.UnknownValue), err - } - - vals["id"] = val - - val, err = v.MaxGb.ToTerraformValue(ctx) - - if err != nil { - return tftypes.NewValue(objectType, tftypes.UnknownValue), err - } - - vals["max_gb"] = val - - val, err = v.Memory.ToTerraformValue(ctx) - - if err != nil { - return tftypes.NewValue(objectType, tftypes.UnknownValue), err - } - - vals["memory"] = val - - val, err = v.MinGb.ToTerraformValue(ctx) - - if err != nil { - return tftypes.NewValue(objectType, tftypes.UnknownValue), err - } - - vals["min_gb"] = val - - val, err = v.NodeType.ToTerraformValue(ctx) - - if err != nil { - return tftypes.NewValue(objectType, tftypes.UnknownValue), err - } - - vals["node_type"] = val - - val, err = v.StorageClasses.ToTerraformValue(ctx) - - if err != nil { - return tftypes.NewValue(objectType, tftypes.UnknownValue), err - } - - vals["storage_classes"] = val - - if err := tftypes.ValidateValue(objectType, vals); err != nil { - return tftypes.NewValue(objectType, tftypes.UnknownValue), err - } - - return tftypes.NewValue(objectType, vals), nil - case attr.ValueStateNull: - return tftypes.NewValue(objectType, nil), nil - case attr.ValueStateUnknown: - return tftypes.NewValue(objectType, tftypes.UnknownValue), nil - default: - panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state)) - } -} - -func (v FlavorsValue) IsNull() bool { - return v.state == attr.ValueStateNull -} - -func (v FlavorsValue) IsUnknown() bool { - return v.state == attr.ValueStateUnknown -} - -func (v FlavorsValue) String() string { - return "FlavorsValue" -} - -func (v FlavorsValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) { - var diags diag.Diagnostics - - storageClasses := types.ListValueMust( - StorageClassesType{ - basetypes.ObjectType{ - AttrTypes: StorageClassesValue{}.AttributeTypes(ctx), - }, - }, - v.StorageClasses.Elements(), - ) - - if v.StorageClasses.IsNull() { - storageClasses = types.ListNull( - StorageClassesType{ - basetypes.ObjectType{ - AttrTypes: StorageClassesValue{}.AttributeTypes(ctx), - }, - }, - ) - } - - if v.StorageClasses.IsUnknown() { - storageClasses = types.ListUnknown( - StorageClassesType{ - basetypes.ObjectType{ - AttrTypes: StorageClassesValue{}.AttributeTypes(ctx), - }, - }, - ) - } - - attributeTypes := map[string]attr.Type{ - "cpu": basetypes.Int64Type{}, - "description": basetypes.StringType{}, - "id": basetypes.StringType{}, - "max_gb": basetypes.Int64Type{}, - "memory": basetypes.Int64Type{}, - "min_gb": basetypes.Int64Type{}, - "node_type": basetypes.StringType{}, - "storage_classes": basetypes.ListType{ - ElemType: StorageClassesValue{}.Type(ctx), - }, - } - - if v.IsNull() { - return types.ObjectNull(attributeTypes), diags - } - - if v.IsUnknown() { - return types.ObjectUnknown(attributeTypes), diags - } - - objVal, diags := types.ObjectValue( - attributeTypes, - map[string]attr.Value{ - "cpu": v.Cpu, - "description": v.Description, - "id": v.Id, - "max_gb": v.MaxGb, - "memory": v.Memory, - "min_gb": v.MinGb, - "node_type": v.NodeType, - "storage_classes": storageClasses, - }) - - return objVal, diags -} - -func (v FlavorsValue) Equal(o attr.Value) bool { - other, ok := o.(FlavorsValue) - - if !ok { - return false - } - - if v.state != other.state { - return false - } - - if v.state != attr.ValueStateKnown { - return true - } - - if !v.Cpu.Equal(other.Cpu) { - return false - } - - if !v.Description.Equal(other.Description) { - return false - } - - if !v.Id.Equal(other.Id) { - return false - } - - if !v.MaxGb.Equal(other.MaxGb) { - return false - } - - if !v.Memory.Equal(other.Memory) { - return false - } - - if !v.MinGb.Equal(other.MinGb) { - return false - } - - if !v.NodeType.Equal(other.NodeType) { - return false - } - - if !v.StorageClasses.Equal(other.StorageClasses) { - return false - } - - return true -} - -func (v FlavorsValue) Type(ctx context.Context) attr.Type { - return FlavorsType{ - basetypes.ObjectType{ - AttrTypes: v.AttributeTypes(ctx), - }, - } -} - -func (v FlavorsValue) AttributeTypes(ctx context.Context) map[string]attr.Type { - return map[string]attr.Type{ - "cpu": basetypes.Int64Type{}, - "description": basetypes.StringType{}, - "id": basetypes.StringType{}, - "max_gb": basetypes.Int64Type{}, - "memory": basetypes.Int64Type{}, - "min_gb": basetypes.Int64Type{}, - "node_type": basetypes.StringType{}, - "storage_classes": basetypes.ListType{ - ElemType: StorageClassesValue{}.Type(ctx), - }, - } -} - -var _ basetypes.ObjectTypable = StorageClassesType{} - -type StorageClassesType struct { - basetypes.ObjectType -} - -func (t StorageClassesType) Equal(o attr.Type) bool { - other, ok := o.(StorageClassesType) - - if !ok { - return false - } - - return t.ObjectType.Equal(other.ObjectType) -} - -func (t StorageClassesType) String() string { - return "StorageClassesType" -} - -func (t StorageClassesType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) { - var diags diag.Diagnostics - - attributes := in.Attributes() - - classAttribute, ok := attributes["class"] - - if !ok { - diags.AddError( - "Attribute Missing", - `class is missing from object`) - - return nil, diags - } - - classVal, ok := classAttribute.(basetypes.StringValue) - - if !ok { - diags.AddError( - "Attribute Wrong Type", - fmt.Sprintf(`class expected to be basetypes.StringValue, was: %T`, classAttribute)) - } - - maxIoPerSecAttribute, ok := attributes["max_io_per_sec"] - - if !ok { - diags.AddError( - "Attribute Missing", - `max_io_per_sec is missing from object`) - - return nil, diags - } - - maxIoPerSecVal, ok := maxIoPerSecAttribute.(basetypes.Int64Value) - - if !ok { - diags.AddError( - "Attribute Wrong Type", - fmt.Sprintf(`max_io_per_sec expected to be basetypes.Int64Value, was: %T`, maxIoPerSecAttribute)) - } - - maxThroughInMbAttribute, ok := attributes["max_through_in_mb"] - - if !ok { - diags.AddError( - "Attribute Missing", - `max_through_in_mb is missing from object`) - - return nil, diags - } - - maxThroughInMbVal, ok := maxThroughInMbAttribute.(basetypes.Int64Value) - - if !ok { - diags.AddError( - "Attribute Wrong Type", - fmt.Sprintf(`max_through_in_mb expected to be basetypes.Int64Value, was: %T`, maxThroughInMbAttribute)) - } - - if diags.HasError() { - return nil, diags - } - - return StorageClassesValue{ - Class: classVal, - MaxIoPerSec: maxIoPerSecVal, - MaxThroughInMb: maxThroughInMbVal, - state: attr.ValueStateKnown, - }, diags -} - -func NewStorageClassesValueNull() StorageClassesValue { - return StorageClassesValue{ - state: attr.ValueStateNull, - } -} - -func NewStorageClassesValueUnknown() StorageClassesValue { - return StorageClassesValue{ - state: attr.ValueStateUnknown, - } -} - -func NewStorageClassesValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (StorageClassesValue, diag.Diagnostics) { - var diags diag.Diagnostics - - // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521 - ctx := context.Background() - - for name, attributeType := range attributeTypes { - attribute, ok := attributes[name] - - if !ok { - diags.AddError( - "Missing StorageClassesValue Attribute Value", - "While creating a StorageClassesValue value, a missing attribute value was detected. "+ - "A StorageClassesValue must contain values for all attributes, even if null or unknown. "+ - "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ - fmt.Sprintf("StorageClassesValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()), - ) - - continue - } - - if !attributeType.Equal(attribute.Type(ctx)) { - diags.AddError( - "Invalid StorageClassesValue Attribute Type", - "While creating a StorageClassesValue value, an invalid attribute value was detected. "+ - "A StorageClassesValue must use a matching attribute type for the value. "+ - "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ - fmt.Sprintf("StorageClassesValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+ - fmt.Sprintf("StorageClassesValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)), - ) - } - } - - for name := range attributes { - _, ok := attributeTypes[name] - - if !ok { - diags.AddError( - "Extra StorageClassesValue Attribute Value", - "While creating a StorageClassesValue value, an extra attribute value was detected. "+ - "A StorageClassesValue must not contain values beyond the expected attribute types. "+ - "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ - fmt.Sprintf("Extra StorageClassesValue Attribute Name: %s", name), - ) - } - } - - if diags.HasError() { - return NewStorageClassesValueUnknown(), diags - } - - classAttribute, ok := attributes["class"] - - if !ok { - diags.AddError( - "Attribute Missing", - `class is missing from object`) - - return NewStorageClassesValueUnknown(), diags - } - - classVal, ok := classAttribute.(basetypes.StringValue) - - if !ok { - diags.AddError( - "Attribute Wrong Type", - fmt.Sprintf(`class expected to be basetypes.StringValue, was: %T`, classAttribute)) - } - - maxIoPerSecAttribute, ok := attributes["max_io_per_sec"] - - if !ok { - diags.AddError( - "Attribute Missing", - `max_io_per_sec is missing from object`) - - return NewStorageClassesValueUnknown(), diags - } - - maxIoPerSecVal, ok := maxIoPerSecAttribute.(basetypes.Int64Value) - - if !ok { - diags.AddError( - "Attribute Wrong Type", - fmt.Sprintf(`max_io_per_sec expected to be basetypes.Int64Value, was: %T`, maxIoPerSecAttribute)) - } - - maxThroughInMbAttribute, ok := attributes["max_through_in_mb"] - - if !ok { - diags.AddError( - "Attribute Missing", - `max_through_in_mb is missing from object`) - - return NewStorageClassesValueUnknown(), diags - } - - maxThroughInMbVal, ok := maxThroughInMbAttribute.(basetypes.Int64Value) - - if !ok { - diags.AddError( - "Attribute Wrong Type", - fmt.Sprintf(`max_through_in_mb expected to be basetypes.Int64Value, was: %T`, maxThroughInMbAttribute)) - } - - if diags.HasError() { - return NewStorageClassesValueUnknown(), diags - } - - return StorageClassesValue{ - Class: classVal, - MaxIoPerSec: maxIoPerSecVal, - MaxThroughInMb: maxThroughInMbVal, - state: attr.ValueStateKnown, - }, diags -} - -func NewStorageClassesValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) StorageClassesValue { - object, diags := NewStorageClassesValue(attributeTypes, attributes) - - if diags.HasError() { - // This could potentially be added to the diag package. - diagsStrings := make([]string, 0, len(diags)) - - for _, diagnostic := range diags { - diagsStrings = append(diagsStrings, fmt.Sprintf( - "%s | %s | %s", - diagnostic.Severity(), - diagnostic.Summary(), - diagnostic.Detail())) - } - - panic("NewStorageClassesValueMust received error(s): " + strings.Join(diagsStrings, "\n")) - } - - return object -} - -func (t StorageClassesType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) { - if in.Type() == nil { - return NewStorageClassesValueNull(), nil - } - - if !in.Type().Equal(t.TerraformType(ctx)) { - return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type()) - } - - if !in.IsKnown() { - return NewStorageClassesValueUnknown(), nil - } - - if in.IsNull() { - return NewStorageClassesValueNull(), nil - } - - attributes := map[string]attr.Value{} - - val := map[string]tftypes.Value{} - - err := in.As(&val) - - if err != nil { - return nil, err - } - - for k, v := range val { - a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v) - - if err != nil { - return nil, err - } - - attributes[k] = a - } - - return NewStorageClassesValueMust(StorageClassesValue{}.AttributeTypes(ctx), attributes), nil -} - -func (t StorageClassesType) ValueType(ctx context.Context) attr.Value { - return StorageClassesValue{} -} - -var _ basetypes.ObjectValuable = StorageClassesValue{} - -type StorageClassesValue struct { - Class basetypes.StringValue `tfsdk:"class"` - MaxIoPerSec basetypes.Int64Value `tfsdk:"max_io_per_sec"` - MaxThroughInMb basetypes.Int64Value `tfsdk:"max_through_in_mb"` - state attr.ValueState -} - -func (v StorageClassesValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) { - attrTypes := make(map[string]tftypes.Type, 3) - - var val tftypes.Value - var err error - - attrTypes["class"] = basetypes.StringType{}.TerraformType(ctx) - attrTypes["max_io_per_sec"] = basetypes.Int64Type{}.TerraformType(ctx) - attrTypes["max_through_in_mb"] = basetypes.Int64Type{}.TerraformType(ctx) - - objectType := tftypes.Object{AttributeTypes: attrTypes} - - switch v.state { - case attr.ValueStateKnown: - vals := make(map[string]tftypes.Value, 3) - - val, err = v.Class.ToTerraformValue(ctx) - - if err != nil { - return tftypes.NewValue(objectType, tftypes.UnknownValue), err - } - - vals["class"] = val - - val, err = v.MaxIoPerSec.ToTerraformValue(ctx) - - if err != nil { - return tftypes.NewValue(objectType, tftypes.UnknownValue), err - } - - vals["max_io_per_sec"] = val - - val, err = v.MaxThroughInMb.ToTerraformValue(ctx) - - if err != nil { - return tftypes.NewValue(objectType, tftypes.UnknownValue), err - } - - vals["max_through_in_mb"] = val - - if err := tftypes.ValidateValue(objectType, vals); err != nil { - return tftypes.NewValue(objectType, tftypes.UnknownValue), err - } - - return tftypes.NewValue(objectType, vals), nil - case attr.ValueStateNull: - return tftypes.NewValue(objectType, nil), nil - case attr.ValueStateUnknown: - return tftypes.NewValue(objectType, tftypes.UnknownValue), nil - default: - panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state)) - } -} - -func (v StorageClassesValue) IsNull() bool { - return v.state == attr.ValueStateNull -} - -func (v StorageClassesValue) IsUnknown() bool { - return v.state == attr.ValueStateUnknown -} - -func (v StorageClassesValue) String() string { - return "StorageClassesValue" -} - -func (v StorageClassesValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) { - var diags diag.Diagnostics - - attributeTypes := map[string]attr.Type{ - "class": basetypes.StringType{}, - "max_io_per_sec": basetypes.Int64Type{}, - "max_through_in_mb": basetypes.Int64Type{}, - } - - if v.IsNull() { - return types.ObjectNull(attributeTypes), diags - } - - if v.IsUnknown() { - return types.ObjectUnknown(attributeTypes), diags - } - - objVal, diags := types.ObjectValue( - attributeTypes, - map[string]attr.Value{ - "class": v.Class, - "max_io_per_sec": v.MaxIoPerSec, - "max_through_in_mb": v.MaxThroughInMb, - }) - - return objVal, diags -} - -func (v StorageClassesValue) Equal(o attr.Value) bool { - other, ok := o.(StorageClassesValue) - - if !ok { - return false - } - - if v.state != other.state { - return false - } - - if v.state != attr.ValueStateKnown { - return true - } - - if !v.Class.Equal(other.Class) { - return false - } - - if !v.MaxIoPerSec.Equal(other.MaxIoPerSec) { - return false - } - - if !v.MaxThroughInMb.Equal(other.MaxThroughInMb) { - return false - } - - return true -} - -func (v StorageClassesValue) Type(ctx context.Context) attr.Type { - return StorageClassesType{ - basetypes.ObjectType{ - AttrTypes: v.AttributeTypes(ctx), - }, - } -} - -func (v StorageClassesValue) AttributeTypes(ctx context.Context) map[string]attr.Type { - return map[string]attr.Type{ - "class": basetypes.StringType{}, - "max_io_per_sec": basetypes.Int64Type{}, - "max_through_in_mb": basetypes.Int64Type{}, - } -} - -var _ basetypes.ObjectTypable = PaginationType{} - -type PaginationType struct { - basetypes.ObjectType -} - -func (t PaginationType) Equal(o attr.Type) bool { - other, ok := o.(PaginationType) - - if !ok { - return false - } - - return t.ObjectType.Equal(other.ObjectType) -} - -func (t PaginationType) String() string { - return "PaginationType" -} - -func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) { - var diags diag.Diagnostics - - attributes := in.Attributes() - - pageAttribute, ok := attributes["page"] - - if !ok { - diags.AddError( - "Attribute Missing", - `page is missing from object`) - - return nil, diags - } - - pageVal, ok := pageAttribute.(basetypes.Int64Value) - - if !ok { - diags.AddError( - "Attribute Wrong Type", - fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute)) - } - - sizeAttribute, ok := attributes["size"] - - if !ok { - diags.AddError( - "Attribute Missing", - `size is missing from object`) - - return nil, diags - } - - sizeVal, ok := sizeAttribute.(basetypes.Int64Value) - - if !ok { - diags.AddError( - "Attribute Wrong Type", - fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute)) - } - - sortAttribute, ok := attributes["sort"] - - if !ok { - diags.AddError( - "Attribute Missing", - `sort is missing from object`) - - return nil, diags - } - - sortVal, ok := sortAttribute.(basetypes.StringValue) - - if !ok { - diags.AddError( - "Attribute Wrong Type", - fmt.Sprintf(`sort expected to be basetypes.StringValue, was: %T`, sortAttribute)) - } - - totalPagesAttribute, ok := attributes["total_pages"] - - if !ok { - diags.AddError( - "Attribute Missing", - `total_pages is missing from object`) - - return nil, diags - } - - totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value) - - if !ok { - diags.AddError( - "Attribute Wrong Type", - fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute)) - } - - totalRowsAttribute, ok := attributes["total_rows"] - - if !ok { - diags.AddError( - "Attribute Missing", - `total_rows is missing from object`) - - return nil, diags - } - - totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value) - - if !ok { - diags.AddError( - "Attribute Wrong Type", - fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute)) - } - - if diags.HasError() { - return nil, diags - } - - return PaginationValue{ - Page: pageVal, - Size: sizeVal, - Sort: sortVal, - TotalPages: totalPagesVal, - TotalRows: totalRowsVal, - state: attr.ValueStateKnown, - }, diags -} - -func NewPaginationValueNull() PaginationValue { - return PaginationValue{ - state: attr.ValueStateNull, - } -} - -func NewPaginationValueUnknown() PaginationValue { - return PaginationValue{ - state: attr.ValueStateUnknown, - } -} - -func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (PaginationValue, diag.Diagnostics) { - var diags diag.Diagnostics - - // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521 - ctx := context.Background() - - for name, attributeType := range attributeTypes { - attribute, ok := attributes[name] - - if !ok { - diags.AddError( - "Missing PaginationValue Attribute Value", - "While creating a PaginationValue value, a missing attribute value was detected. "+ - "A PaginationValue must contain values for all attributes, even if null or unknown. "+ - "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ - fmt.Sprintf("PaginationValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()), - ) - - continue - } - - if !attributeType.Equal(attribute.Type(ctx)) { - diags.AddError( - "Invalid PaginationValue Attribute Type", - "While creating a PaginationValue value, an invalid attribute value was detected. "+ - "A PaginationValue must use a matching attribute type for the value. "+ - "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ - fmt.Sprintf("PaginationValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+ - fmt.Sprintf("PaginationValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)), - ) - } - } - - for name := range attributes { - _, ok := attributeTypes[name] - - if !ok { - diags.AddError( - "Extra PaginationValue Attribute Value", - "While creating a PaginationValue value, an extra attribute value was detected. "+ - "A PaginationValue must not contain values beyond the expected attribute types. "+ - "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ - fmt.Sprintf("Extra PaginationValue Attribute Name: %s", name), - ) - } - } - - if diags.HasError() { - return NewPaginationValueUnknown(), diags - } - - pageAttribute, ok := attributes["page"] - - if !ok { - diags.AddError( - "Attribute Missing", - `page is missing from object`) - - return NewPaginationValueUnknown(), diags - } - - pageVal, ok := pageAttribute.(basetypes.Int64Value) - - if !ok { - diags.AddError( - "Attribute Wrong Type", - fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute)) - } - - sizeAttribute, ok := attributes["size"] - - if !ok { - diags.AddError( - "Attribute Missing", - `size is missing from object`) - - return NewPaginationValueUnknown(), diags - } - - sizeVal, ok := sizeAttribute.(basetypes.Int64Value) - - if !ok { - diags.AddError( - "Attribute Wrong Type", - fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute)) - } - - sortAttribute, ok := attributes["sort"] - - if !ok { - diags.AddError( - "Attribute Missing", - `sort is missing from object`) - - return NewPaginationValueUnknown(), diags - } - - sortVal, ok := sortAttribute.(basetypes.StringValue) - - if !ok { - diags.AddError( - "Attribute Wrong Type", - fmt.Sprintf(`sort expected to be basetypes.StringValue, was: %T`, sortAttribute)) - } - - totalPagesAttribute, ok := attributes["total_pages"] - - if !ok { - diags.AddError( - "Attribute Missing", - `total_pages is missing from object`) - - return NewPaginationValueUnknown(), diags - } - - totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value) - - if !ok { - diags.AddError( - "Attribute Wrong Type", - fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute)) - } - - totalRowsAttribute, ok := attributes["total_rows"] - - if !ok { - diags.AddError( - "Attribute Missing", - `total_rows is missing from object`) - - return NewPaginationValueUnknown(), diags - } - - totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value) - - if !ok { - diags.AddError( - "Attribute Wrong Type", - fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute)) - } - - if diags.HasError() { - return NewPaginationValueUnknown(), diags - } - - return PaginationValue{ - Page: pageVal, - Size: sizeVal, - Sort: sortVal, - TotalPages: totalPagesVal, - TotalRows: totalRowsVal, - state: attr.ValueStateKnown, - }, diags -} - -func NewPaginationValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) PaginationValue { - object, diags := NewPaginationValue(attributeTypes, attributes) - - if diags.HasError() { - // This could potentially be added to the diag package. - diagsStrings := make([]string, 0, len(diags)) - - for _, diagnostic := range diags { - diagsStrings = append(diagsStrings, fmt.Sprintf( - "%s | %s | %s", - diagnostic.Severity(), - diagnostic.Summary(), - diagnostic.Detail())) - } - - panic("NewPaginationValueMust received error(s): " + strings.Join(diagsStrings, "\n")) - } - - return object -} - -func (t PaginationType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) { - if in.Type() == nil { - return NewPaginationValueNull(), nil - } - - if !in.Type().Equal(t.TerraformType(ctx)) { - return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type()) - } - - if !in.IsKnown() { - return NewPaginationValueUnknown(), nil - } - - if in.IsNull() { - return NewPaginationValueNull(), nil - } - - attributes := map[string]attr.Value{} - - val := map[string]tftypes.Value{} - - err := in.As(&val) - - if err != nil { - return nil, err - } - - for k, v := range val { - a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v) - - if err != nil { - return nil, err - } - - attributes[k] = a - } - - return NewPaginationValueMust(PaginationValue{}.AttributeTypes(ctx), attributes), nil -} - -func (t PaginationType) ValueType(ctx context.Context) attr.Value { - return PaginationValue{} -} - -var _ basetypes.ObjectValuable = PaginationValue{} - -type PaginationValue struct { - Page basetypes.Int64Value `tfsdk:"page"` - Size basetypes.Int64Value `tfsdk:"size"` - Sort basetypes.StringValue `tfsdk:"sort"` - TotalPages basetypes.Int64Value `tfsdk:"total_pages"` - TotalRows basetypes.Int64Value `tfsdk:"total_rows"` - state attr.ValueState -} - -func (v PaginationValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) { - attrTypes := make(map[string]tftypes.Type, 5) - - var val tftypes.Value - var err error - - attrTypes["page"] = basetypes.Int64Type{}.TerraformType(ctx) - attrTypes["size"] = basetypes.Int64Type{}.TerraformType(ctx) - attrTypes["sort"] = basetypes.StringType{}.TerraformType(ctx) - attrTypes["total_pages"] = basetypes.Int64Type{}.TerraformType(ctx) - attrTypes["total_rows"] = basetypes.Int64Type{}.TerraformType(ctx) - - objectType := tftypes.Object{AttributeTypes: attrTypes} - - switch v.state { - case attr.ValueStateKnown: - vals := make(map[string]tftypes.Value, 5) - - val, err = v.Page.ToTerraformValue(ctx) - - if err != nil { - return tftypes.NewValue(objectType, tftypes.UnknownValue), err - } - - vals["page"] = val - - val, err = v.Size.ToTerraformValue(ctx) - - if err != nil { - return tftypes.NewValue(objectType, tftypes.UnknownValue), err - } - - vals["size"] = val - - val, err = v.Sort.ToTerraformValue(ctx) - - if err != nil { - return tftypes.NewValue(objectType, tftypes.UnknownValue), err - } - - vals["sort"] = val - - val, err = v.TotalPages.ToTerraformValue(ctx) - - if err != nil { - return tftypes.NewValue(objectType, tftypes.UnknownValue), err - } - - vals["total_pages"] = val - - val, err = v.TotalRows.ToTerraformValue(ctx) - - if err != nil { - return tftypes.NewValue(objectType, tftypes.UnknownValue), err - } - - vals["total_rows"] = val - - if err := tftypes.ValidateValue(objectType, vals); err != nil { - return tftypes.NewValue(objectType, tftypes.UnknownValue), err - } - - return tftypes.NewValue(objectType, vals), nil - case attr.ValueStateNull: - return tftypes.NewValue(objectType, nil), nil - case attr.ValueStateUnknown: - return tftypes.NewValue(objectType, tftypes.UnknownValue), nil - default: - panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state)) - } -} - -func (v PaginationValue) IsNull() bool { - return v.state == attr.ValueStateNull -} - -func (v PaginationValue) IsUnknown() bool { - return v.state == attr.ValueStateUnknown -} - -func (v PaginationValue) String() string { - return "PaginationValue" -} - -func (v PaginationValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) { - var diags diag.Diagnostics - - attributeTypes := map[string]attr.Type{ - "page": basetypes.Int64Type{}, - "size": basetypes.Int64Type{}, - "sort": basetypes.StringType{}, - "total_pages": basetypes.Int64Type{}, - "total_rows": basetypes.Int64Type{}, - } - - if v.IsNull() { - return types.ObjectNull(attributeTypes), diags - } - - if v.IsUnknown() { - return types.ObjectUnknown(attributeTypes), diags - } - - objVal, diags := types.ObjectValue( - attributeTypes, - map[string]attr.Value{ - "page": v.Page, - "size": v.Size, - "sort": v.Sort, - "total_pages": v.TotalPages, - "total_rows": v.TotalRows, - }) - - return objVal, diags -} - -func (v PaginationValue) Equal(o attr.Value) bool { - other, ok := o.(PaginationValue) - - if !ok { - return false - } - - if v.state != other.state { - return false - } - - if v.state != attr.ValueStateKnown { - return true - } - - if !v.Page.Equal(other.Page) { - return false - } - - if !v.Size.Equal(other.Size) { - return false - } - - if !v.Sort.Equal(other.Sort) { - return false - } - - if !v.TotalPages.Equal(other.TotalPages) { - return false - } - - if !v.TotalRows.Equal(other.TotalRows) { - return false - } - - return true -} - -func (v PaginationValue) Type(ctx context.Context) attr.Type { - return PaginationType{ - basetypes.ObjectType{ - AttrTypes: v.AttributeTypes(ctx), - }, - } -} - -func (v PaginationValue) AttributeTypes(ctx context.Context) map[string]attr.Type { - return map[string]attr.Type{ - "page": basetypes.Int64Type{}, - "size": basetypes.Int64Type{}, - "sort": basetypes.StringType{}, - "total_pages": basetypes.Int64Type{}, - "total_rows": basetypes.Int64Type{}, - } -} diff --git a/stackit/internal/services/sqlserverflexbeta/user/datasource.go b/stackit/internal/services/sqlserverflexbeta/user/datasource.go index 985692a6..df1a8033 100644 --- a/stackit/internal/services/sqlserverflexbeta/user/datasource.go +++ b/stackit/internal/services/sqlserverflexbeta/user/datasource.go @@ -7,13 +7,12 @@ import ( "github.com/hashicorp/terraform-plugin-framework/datasource" "github.com/hashicorp/terraform-plugin-log/tflog" - "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" sqlserverflexbetaPkg "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta" - sqlserverflexbetaUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/utils" + // sqlserverflexbetaUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/utils" sqlserverflexbetaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/user/datasources_gen" ) @@ -45,17 +44,17 @@ func (d *userDataSource) Configure( req datasource.ConfigureRequest, resp *datasource.ConfigureResponse, ) { - var ok bool - d.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics) - if !ok { - return - } - - apiClient := sqlserverflexbetaUtils.ConfigureClient(ctx, &d.providerData, &resp.Diagnostics) - if resp.Diagnostics.HasError() { - return - } - d.client = apiClient + //var ok bool + //d.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics) + //if !ok { + // return + //} + // + //apiClient := sqlserverflexbetaUtils.ConfigureClient(ctx, &d.providerData, &resp.Diagnostics) + //if resp.Diagnostics.HasError() { + // return + //} + //d.client = apiClient tflog.Info(ctx, fmt.Sprintf("%s client configured", errorPrefix)) } diff --git a/stackit/internal/testutil/assert.go b/stackit/internal/testutil/assert.go new file mode 100644 index 00000000..80cb2104 --- /dev/null +++ b/stackit/internal/testutil/assert.go @@ -0,0 +1,11 @@ +package testutil + +import "testing" + +func Equal[V comparable](t *testing.T, got, expected V) { + t.Helper() + + if expected != got { + t.Errorf("assert equal failed:\ngot: %v \nexpected: %v", got, expected) + } +} diff --git a/stackit/internal/testutil/testutil.go b/stackit/internal/testutil/testutil.go index e2ab0c59..b678efef 100644 --- a/stackit/internal/testutil/testutil.go +++ b/stackit/internal/testutil/testutil.go @@ -3,7 +3,6 @@ package testutil import ( - "encoding/json" "fmt" "os" "path/filepath" @@ -20,7 +19,8 @@ import ( const ( // Default location of credentials JSON - credentialsFilePath = ".stackit/credentials.json" //nolint:gosec // linter false positive + // credentialsFilePath = ".stackit/credentials.json" //nolint:gosec // linter false positive + serviceAccountFilePath = ".stackit/service_account.json" ) var ( @@ -29,7 +29,7 @@ var ( // CLI command executed to create a provider server to which the CLI can // reattach. TestAccProtoV6ProviderFactories = map[string]func() (tfprotov6.ProviderServer, error){ - "stackit": providerserver.NewProtocol6WithError(stackit.New("test-version")()), + "stackitprivatepreview": providerserver.NewProtocol6WithError(stackit.New("test-version")()), } // TestEphemeralAccProtoV6ProviderFactories is used to instantiate a provider during @@ -40,8 +40,8 @@ var ( // See the Terraform acceptance test documentation on ephemeral resources for more information: // https://developer.hashicorp.com/terraform/plugin/testing/acceptance-tests/ephemeral-resources TestEphemeralAccProtoV6ProviderFactories = map[string]func() (tfprotov6.ProviderServer, error){ - "stackit": providerserver.NewProtocol6WithError(stackit.New("test-version")()), - "echo": echoprovider.NewProviderServer(), + "stackitprivatepreview": providerserver.NewProtocol6WithError(stackit.New("test-version")()), + "echo": echoprovider.NewProviderServer(), } // E2ETestsEnabled checks if end-to-end tests should be run. @@ -52,6 +52,8 @@ var ( // ProjectId is the id of project used for tests ProjectId = os.Getenv("TF_ACC_PROJECT_ID") Region = os.Getenv("TF_ACC_REGION") + // ServiceAccountFile is the json file of the service account + ServiceAccountFile = os.Getenv("TF_ACC_SERVICE_ACCOUNT_FILE") // ServerId is the id of a server used for some tests ServerId = getenv("TF_ACC_SERVER_ID", "") // TestProjectParentContainerID is the container id of the parent resource under which projects are created as part of the resource-manager acceptance tests @@ -97,26 +99,27 @@ var ( func ObservabilityProviderConfig() string { if ObservabilityCustomEndpoint == "" { - return `provider "stackit" { + return `provider "stackitprivatepreview" { default_region = "eu01" }` } return fmt.Sprintf(` - provider "stackit" { + provider "stackitprivatepreview" { observability_custom_endpoint = "%s" }`, ObservabilityCustomEndpoint, ) } + func CdnProviderConfig() string { if CdnCustomEndpoint == "" { return ` - provider "stackit" { + provider "stackitprivatepreview" { enable_beta_resources = true }` } return fmt.Sprintf(` - provider "stackit" { + provider "stackitprivatepreview" { cdn_custom_endpoint = "%s" enable_beta_resources = true }`, @@ -126,10 +129,10 @@ func CdnProviderConfig() string { func DnsProviderConfig() string { if DnsCustomEndpoint == "" { - return `provider "stackit" {}` + return `provider "stackitprivatepreview" {}` } return fmt.Sprintf(` - provider "stackit" { + provider "stackitprivatepreview" { dns_custom_endpoint = "%s" }`, DnsCustomEndpoint, @@ -139,12 +142,12 @@ func DnsProviderConfig() string { func IaaSProviderConfig() string { if IaaSCustomEndpoint == "" { return ` - provider "stackit" { + provider "stackitprivatepreview" { default_region = "eu01" }` } return fmt.Sprintf(` - provider "stackit" { + provider "stackitprivatepreview" { iaas_custom_endpoint = "%s" }`, IaaSCustomEndpoint, @@ -154,13 +157,13 @@ func IaaSProviderConfig() string { func IaaSProviderConfigWithBetaResourcesEnabled() string { if IaaSCustomEndpoint == "" { return ` - provider "stackit" { + provider "stackitprivatepreview" { enable_beta_resources = true default_region = "eu01" }` } return fmt.Sprintf(` - provider "stackit" { + provider "stackitprivatepreview" { enable_beta_resources = true iaas_custom_endpoint = "%s" }`, @@ -171,13 +174,13 @@ func IaaSProviderConfigWithBetaResourcesEnabled() string { func IaaSProviderConfigWithExperiments() string { if IaaSCustomEndpoint == "" { return ` - provider "stackit" { + provider "stackitprivatepreview" { default_region = "eu01" experiments = [ "routing-tables", "network" ] }` } return fmt.Sprintf(` - provider "stackit" { + provider "stackitprivatepreview" { iaas_custom_endpoint = "%s" experiments = [ "routing-tables", "network" ] }`, @@ -188,12 +191,12 @@ func IaaSProviderConfigWithExperiments() string { func KMSProviderConfig() string { if KMSCustomEndpoint == "" { return ` - provider "stackit" { + provider "stackitprivatepreview" { default_region = "eu01" }` } return fmt.Sprintf(` - provider "stackit" { + provider "stackitprivatepreview" { kms_custom_endpoint = "%s" }`, KMSCustomEndpoint, @@ -203,12 +206,12 @@ func KMSProviderConfig() string { func LoadBalancerProviderConfig() string { if LoadBalancerCustomEndpoint == "" { return ` - provider "stackit" { + provider "stackitprivatepreview" { default_region = "eu01" }` } return fmt.Sprintf(` - provider "stackit" { + provider "stackitprivatepreview" { loadbalancer_custom_endpoint = "%s" }`, LoadBalancerCustomEndpoint, @@ -218,12 +221,12 @@ func LoadBalancerProviderConfig() string { func LogMeProviderConfig() string { if LogMeCustomEndpoint == "" { return ` - provider "stackit" { + provider "stackitprivatepreview" { default_region = "eu01" }` } return fmt.Sprintf(` - provider "stackit" { + provider "stackitprivatepreview" { logme_custom_endpoint = "%s" }`, LogMeCustomEndpoint, @@ -233,12 +236,12 @@ func LogMeProviderConfig() string { func MariaDBProviderConfig() string { if MariaDBCustomEndpoint == "" { return ` - provider "stackit" { + provider "stackitprivatepreview" { default_region = "eu01" }` } return fmt.Sprintf(` - provider "stackit" { + provider "stackitprivatepreview" { mariadb_custom_endpoint = "%s" }`, MariaDBCustomEndpoint, @@ -248,13 +251,13 @@ func MariaDBProviderConfig() string { func ModelServingProviderConfig() string { if ModelServingCustomEndpoint == "" { return ` - provider "stackit" { + provider "stackitprivatepreview" { default_region = "eu01" } ` } return fmt.Sprintf(` - provider "stackit" { + provider "stackitprivatepreview" { modelserving_custom_endpoint = "%s" }`, ModelServingCustomEndpoint, @@ -264,12 +267,12 @@ func ModelServingProviderConfig() string { func MongoDBFlexProviderConfig() string { if MongoDBFlexCustomEndpoint == "" { return ` - provider "stackit" { + provider "stackitprivatepreview" { default_region = "eu01" }` } return fmt.Sprintf(` - provider "stackit" { + provider "stackitprivatepreview" { mongodbflex_custom_endpoint = "%s" }`, MongoDBFlexCustomEndpoint, @@ -279,12 +282,12 @@ func MongoDBFlexProviderConfig() string { func ObjectStorageProviderConfig() string { if ObjectStorageCustomEndpoint == "" { return ` - provider "stackit" { + provider "stackitprivatepreview" { default_region = "eu01" }` } return fmt.Sprintf(` - provider "stackit" { + provider "stackitprivatepreview" { objectstorage_custom_endpoint = "%s" }`, ObjectStorageCustomEndpoint, @@ -294,29 +297,32 @@ func ObjectStorageProviderConfig() string { func OpenSearchProviderConfig() string { if OpenSearchCustomEndpoint == "" { return ` - provider "stackit" { + provider "stackitprivatepreview" { default_region = "eu01" }` } return fmt.Sprintf(` - provider "stackit" { + provider "stackitprivatepreview" { opensearch_custom_endpoint = "%s" }`, OpenSearchCustomEndpoint, ) } -func PostgresFlexProviderConfig() string { +func PostgresFlexProviderConfig(saFile string) string { if PostgresFlexCustomEndpoint == "" { - return ` - provider "stackit" { + return fmt.Sprintf(` + provider "stackitprivatepreview" { default_region = "eu01" - }` + service_account_key_path = "%s" + }`, saFile) } return fmt.Sprintf(` - provider "stackit" { + provider "stackitprivatepreview" { + service_account_key_path = "%s" postgresflex_custom_endpoint = "%s" }`, + saFile, PostgresFlexCustomEndpoint, ) } @@ -324,12 +330,12 @@ func PostgresFlexProviderConfig() string { func RabbitMQProviderConfig() string { if RabbitMQCustomEndpoint == "" { return ` - provider "stackit" { + provider "stackitprivatepreview" { default_region = "eu01" }` } return fmt.Sprintf(` - provider "stackit" { + provider "stackitprivatepreview" { rabbitmq_custom_endpoint = "%s" }`, RabbitMQCustomEndpoint, @@ -339,12 +345,12 @@ func RabbitMQProviderConfig() string { func RedisProviderConfig() string { if RedisCustomEndpoint == "" { return ` - provider "stackit" { + provider "stackitprivatepreview" { default_region = "eu01" }` } return fmt.Sprintf(` - provider "stackit" { + provider "stackitprivatepreview" { redis_custom_endpoint = "%s" }`, RedisCustomEndpoint, @@ -352,53 +358,56 @@ func RedisProviderConfig() string { } func ResourceManagerProviderConfig() string { - token := GetTestProjectServiceAccountToken("") + key := GetTestProjectServiceAccountJson("") if ResourceManagerCustomEndpoint == "" || AuthorizationCustomEndpoint == "" { return fmt.Sprintf(` - provider "stackit" { - service_account_token = "%s" + provider "stackitprivatepreview" { + service_account_key = "%s" }`, - token, + key, ) } return fmt.Sprintf(` - provider "stackit" { + provider "stackitprivatepreview" { resourcemanager_custom_endpoint = "%s" authorization_custom_endpoint = "%s" service_account_token = "%s" }`, ResourceManagerCustomEndpoint, AuthorizationCustomEndpoint, - token, + key, ) } func SecretsManagerProviderConfig() string { if SecretsManagerCustomEndpoint == "" { return ` - provider "stackit" { + provider "stackitprivatepreview" { default_region = "eu01" }` } return fmt.Sprintf(` - provider "stackit" { + provider "stackitprivatepreview" { secretsmanager_custom_endpoint = "%s" }`, SecretsManagerCustomEndpoint, ) } -func SQLServerFlexProviderConfig() string { +func SQLServerFlexProviderConfig(saFile string) string { if SQLServerFlexCustomEndpoint == "" { - return ` - provider "stackit" { + return fmt.Sprintf(` + provider "stackitprivatepreview" { default_region = "eu01" - }` + service_account_key_path = "%s" + }`, saFile) } return fmt.Sprintf(` - provider "stackit" { + provider "stackitprivatepreview" { + service_account_key_path = "%s" sqlserverflex_custom_endpoint = "%s" }`, + saFile, SQLServerFlexCustomEndpoint, ) } @@ -406,13 +415,13 @@ func SQLServerFlexProviderConfig() string { func ServerBackupProviderConfig() string { if ServerBackupCustomEndpoint == "" { return ` - provider "stackit" { + provider "stackitprivatepreview" { default_region = "eu01" enable_beta_resources = true }` } return fmt.Sprintf(` - provider "stackit" { + provider "stackitprivatepreview" { server_backup_custom_endpoint = "%s" enable_beta_resources = true }`, @@ -423,13 +432,13 @@ func ServerBackupProviderConfig() string { func ServerUpdateProviderConfig() string { if ServerUpdateCustomEndpoint == "" { return ` - provider "stackit" { + provider "stackitprivatepreview" { default_region = "eu01" enable_beta_resources = true }` } return fmt.Sprintf(` - provider "stackit" { + provider "stackitprivatepreview" { server_update_custom_endpoint = "%s" enable_beta_resources = true }`, @@ -440,12 +449,12 @@ func ServerUpdateProviderConfig() string { func SKEProviderConfig() string { if SKECustomEndpoint == "" { return ` - provider "stackit" { + provider "stackitprivatepreview" { default_region = "eu01" }` } return fmt.Sprintf(` - provider "stackit" { + provider "stackitprivatepreview" { ske_custom_endpoint = "%s" }`, SKECustomEndpoint, @@ -455,13 +464,13 @@ func SKEProviderConfig() string { func AuthorizationProviderConfig() string { if AuthorizationCustomEndpoint == "" { return ` - provider "stackit" { + provider "stackitprivatepreview" { default_region = "eu01" experiments = ["iam"] }` } return fmt.Sprintf(` - provider "stackit" { + provider "stackitprivatepreview" { authorization_custom_endpoint = "%s" experiments = ["iam"] }`, @@ -472,13 +481,13 @@ func AuthorizationProviderConfig() string { func ServiceAccountProviderConfig() string { if ServiceAccountCustomEndpoint == "" { return ` - provider "stackit" { + provider "stackitprivatepreview" { default_region = "eu01" enable_beta_resources = true }` } return fmt.Sprintf(` - provider "stackit" { + provider "stackitprivatepreview" { service_account_custom_endpoint = "%s" enable_beta_resources = true }`, @@ -489,13 +498,13 @@ func ServiceAccountProviderConfig() string { func GitProviderConfig() string { if GitCustomEndpoint == "" { return ` - provider "stackit" { + provider "stackitprivatepreview" { default_region = "eu01" enable_beta_resources = true }` } return fmt.Sprintf(` - provider "stackit" { + provider "stackitprivatepreview" { git_custom_endpoint = "%s" enable_beta_resources = true }`, @@ -506,12 +515,12 @@ func GitProviderConfig() string { func ScfProviderConfig() string { if ScfCustomEndpoint == "" { return ` - provider "stackit" { + provider "stackitprivatepreview" { default_region = "eu01" }` } return fmt.Sprintf(` - provider "stackit" { + provider "stackitprivatepreview" { default_region = "eu01" scf_custom_endpoint = "%s" }`, @@ -526,11 +535,11 @@ func ResourceNameWithDateTime(name string) string { return fmt.Sprintf("tf-acc-%s-%s", name, dateTimeTrimmed) } -func GetTestProjectServiceAccountToken(path string) string { +func GetTestProjectServiceAccountJson(path string) string { var err error - token, tokenSet := os.LookupEnv("TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_TOKEN") + token, tokenSet := os.LookupEnv("TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_JSON") if !tokenSet || token == "" { - token, err = readTestTokenFromCredentialsFile(path) + token, err = readTestServiceAccountJsonFromFile(path) if err != nil { return "" } @@ -538,11 +547,53 @@ func GetTestProjectServiceAccountToken(path string) string { return token } -func readTestTokenFromCredentialsFile(path string) (string, error) { +//func GetTestProjectServiceAccountToken(path string) string { +// var err error +// token, tokenSet := os.LookupEnv("TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_TOKEN") +// if !tokenSet || token == "" { +// token, err = readTestTokenFromCredentialsFile(path) +// if err != nil { +// return "" +// } +// } +// return token +//} +// +//func readTestTokenFromCredentialsFile(path string) (string, error) { +// if path == "" { +// customPath, customPathSet := os.LookupEnv("STACKIT_CREDENTIALS_PATH") +// if !customPathSet || customPath == "" { +// path = credentialsFilePath +// home, err := os.UserHomeDir() +// if err != nil { +// return "", fmt.Errorf("getting home directory: %w", err) +// } +// path = filepath.Join(home, path) +// } else { +// path = customPath +// } +// } +// +// credentialsRaw, err := os.ReadFile(path) +// if err != nil { +// return "", fmt.Errorf("opening file: %w", err) +// } +// +// var credentials struct { +// TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_TOKEN string `json:"TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_TOKEN"` +// } +// err = json.Unmarshal(credentialsRaw, &credentials) +// if err != nil { +// return "", fmt.Errorf("unmarshalling credentials: %w", err) +// } +// return credentials.TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_TOKEN, nil +//} + +func readTestServiceAccountJsonFromFile(path string) (string, error) { if path == "" { - customPath, customPathSet := os.LookupEnv("STACKIT_CREDENTIALS_PATH") + customPath, customPathSet := os.LookupEnv("STACKIT_SERVICE_ACCOUNT_PATH") if !customPathSet || customPath == "" { - path = credentialsFilePath + path = serviceAccountFilePath home, err := os.UserHomeDir() if err != nil { return "", fmt.Errorf("getting home directory: %w", err) @@ -557,15 +608,7 @@ func readTestTokenFromCredentialsFile(path string) (string, error) { if err != nil { return "", fmt.Errorf("opening file: %w", err) } - - var credentials struct { - TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_TOKEN string `json:"TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_TOKEN"` - } - err = json.Unmarshal(credentialsRaw, &credentials) - if err != nil { - return "", fmt.Errorf("unmarshalling credentials: %w", err) - } - return credentials.TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_TOKEN, nil + return string(credentialsRaw), nil } func getenv(key, defaultValue string) string { diff --git a/stackit/provider_acc_test.go b/stackit/provider_acc_test.go index cfd6095f..7045874b 100644 --- a/stackit/provider_acc_test.go +++ b/stackit/provider_acc_test.go @@ -5,12 +5,15 @@ package stackit_test import ( _ "embed" "fmt" + "log/slog" "os" "path" "regexp" "runtime" "testing" + "github.com/joho/godotenv" + "github.com/hashicorp/terraform-plugin-testing/config" "github.com/hashicorp/terraform-plugin-testing/helper/acctest" "github.com/hashicorp/terraform-plugin-testing/helper/resource" @@ -31,6 +34,22 @@ var testConfigProviderCredentials = config.Variables{ "name": config.StringVariable(fmt.Sprintf("tf-acc-prov%s", acctest.RandStringFromCharSet(3, acctest.CharSetAlphaNum))), } +func setup() { + err := godotenv.Load() + if err != nil { + slog.Info("could not find .env file - not loading .env") + return + } + slog.Info("loaded .env file") +} + +func TestMain(m *testing.M) { + setup() + code := m.Run() + // shutdown() + os.Exit(code) +} + // Helper function to obtain the home directory on different systems. // Based on os.UserHomeDir(). func getHomeEnvVariableName() string { From b1b359f436b52e24e0dc4923ed851b058dcfc76f Mon Sep 17 00:00:00 2001 From: "Marcel S. Henselin" Date: Mon, 9 Feb 2026 07:28:01 +0000 Subject: [PATCH 04/41] feat: refactor testing (#35) ## Description relates to #1234 ## Checklist - [ ] Issue was linked above - [ ] Code format was applied: `make fmt` - [ ] Examples were added / adjusted (see `examples/` directory) - [x] Docs are up-to-date: `make generate-docs` (will be checked by CI) - [ ] Unit tests got implemented or updated - [ ] Acceptance tests got implemented or updated (see e.g. [here](https://github.com/stackitcloud/terraform-provider-stackit/blob/f5f99d170996b208672ae684b6da53420e369563/stackit/internal/services/dns/dns_acc_test.go)) - [x] Unit tests are passing: `make test` (will be checked by CI) - [x] No linter issues: `make lint` (will be checked by CI) Reviewed-on: https://tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pulls/35 Reviewed-by: Andre_Harms Co-authored-by: Marcel S. Henselin Co-committed-by: Marcel S. Henselin --- .../postgres-flex/alpha/database_config.yml | 1 - .../postgresflexalpha/instance/functions.go | 1 + .../sqlserverflexbeta/instance/functions.go | 10 +- .../sqlserverflexbeta/instance/resource.go | 90 ++-- .../instance/resource_test.go | 430 ++++++++++++++++++ .../internal/wait/sqlserverflexbeta/wait.go | 14 +- 6 files changed, 486 insertions(+), 60 deletions(-) create mode 100644 stackit/internal/services/sqlserverflexbeta/instance/resource_test.go diff --git a/service_specs/postgres-flex/alpha/database_config.yml b/service_specs/postgres-flex/alpha/database_config.yml index 7af8d47b..c7bb4e29 100644 --- a/service_specs/postgres-flex/alpha/database_config.yml +++ b/service_specs/postgres-flex/alpha/database_config.yml @@ -1,4 +1,3 @@ - provider: name: stackitprivatepreview diff --git a/stackit/internal/services/postgresflexalpha/instance/functions.go b/stackit/internal/services/postgresflexalpha/instance/functions.go index ac40f185..dc29abe4 100644 --- a/stackit/internal/services/postgresflexalpha/instance/functions.go +++ b/stackit/internal/services/postgresflexalpha/instance/functions.go @@ -36,6 +36,7 @@ func mapGetInstanceResponseToModel(ctx context.Context, m *postgresflexalphareso }) m.BackupSchedule = types.StringValue(resp.GetBackupSchedule()) + m.Encryption = postgresflexalpharesource.NewEncryptionValueNull() if resp.HasEncryption() { m.Encryption = postgresflexalpharesource.NewEncryptionValueMust( m.Encryption.AttributeTypes(ctx), diff --git a/stackit/internal/services/sqlserverflexbeta/instance/functions.go b/stackit/internal/services/sqlserverflexbeta/instance/functions.go index d66f358e..e9e1db57 100644 --- a/stackit/internal/services/sqlserverflexbeta/instance/functions.go +++ b/stackit/internal/services/sqlserverflexbeta/instance/functions.go @@ -293,12 +293,10 @@ func toUpdatePayload( BackupSchedule: m.BackupSchedule.ValueStringPointer(), FlavorId: m.FlavorId.ValueStringPointer(), Name: m.Name.ValueStringPointer(), - Network: &sqlserverflexbeta.UpdateInstanceRequestPayloadNetwork{ - Acl: &netAcl, - }, - Replicas: &replVal, - RetentionDays: m.RetentionDays.ValueInt64Pointer(), - Storage: &sqlserverflexbeta.StorageUpdate{Size: m.Storage.Size.ValueInt64Pointer()}, + Network: sqlserverflexbeta.NewUpdateInstanceRequestPayloadNetwork(netAcl), + Replicas: &replVal, + RetentionDays: m.RetentionDays.ValueInt64Pointer(), + Storage: &sqlserverflexbeta.StorageUpdate{Size: m.Storage.Size.ValueInt64Pointer()}, Version: sqlserverflexbeta.UpdateInstanceRequestPayloadGetVersionAttributeType( m.Version.ValueStringPointer(), ), diff --git a/stackit/internal/services/sqlserverflexbeta/instance/resource.go b/stackit/internal/services/sqlserverflexbeta/instance/resource.go index a6325169..5d0b47d8 100644 --- a/stackit/internal/services/sqlserverflexbeta/instance/resource.go +++ b/stackit/internal/services/sqlserverflexbeta/instance/resource.go @@ -141,18 +141,6 @@ func (r *instanceResource) ModifyPlan( return } - var identityModel InstanceResourceIdentityModel - identityModel.ProjectID = planModel.ProjectId - identityModel.Region = planModel.Region - if !planModel.InstanceId.IsNull() && !planModel.InstanceId.IsUnknown() { - identityModel.InstanceID = planModel.InstanceId - } - - resp.Diagnostics.Append(resp.Identity.Set(ctx, identityModel)...) - if resp.Diagnostics.HasError() { - return - } - resp.Diagnostics.Append(resp.Plan.Set(ctx, planModel)...) if resp.Diagnostics.HasError() { return @@ -164,6 +152,7 @@ var modifiersFileByte []byte func (r *instanceResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) { var data sqlserverflexbetaResGen.InstanceModel + crateErr := "[SQL Server Flex BETA - Create] error" // Read Terraform plan data into the model resp.Diagnostics.Append(req.Plan.Get(ctx, &data)...) @@ -172,17 +161,10 @@ func (r *instanceResource) Create(ctx context.Context, req resource.CreateReques return } - // Read identity data - var identityData InstanceResourceIdentityModel - resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...) - if resp.Diagnostics.HasError() { - return - } - ctx = core.InitProviderContext(ctx) - projectId := identityData.ProjectID.ValueString() - region := identityData.Region.ValueString() + projectId := data.ProjectId.ValueString() + region := data.Region.ValueString() ctx = tflog.SetField(ctx, "project_id", projectId) ctx = tflog.SetField(ctx, "region", region) @@ -192,7 +174,7 @@ func (r *instanceResource) Create(ctx context.Context, req resource.CreateReques core.LogAndAddError( ctx, &resp.Diagnostics, - "Error creating Instance", + crateErr, fmt.Sprintf("Creating API payload: %v", err), ) return @@ -204,7 +186,7 @@ func (r *instanceResource) Create(ctx context.Context, req resource.CreateReques region, ).CreateInstanceRequestPayload(*payload).Execute() if err != nil { - core.LogAndAddError(ctx, &resp.Diagnostics, "Error creating Instance", fmt.Sprintf("Calling API: %v", err)) + core.LogAndAddError(ctx, &resp.Diagnostics, crateErr, fmt.Sprintf("Calling API: %v", err)) return } @@ -240,7 +222,7 @@ func (r *instanceResource) Create(ctx context.Context, req resource.CreateReques core.LogAndAddError( ctx, &resp.Diagnostics, - "Error creating Instance", + crateErr, fmt.Sprintf("Instance creation waiting: %v", err), ) return @@ -250,7 +232,7 @@ func (r *instanceResource) Create(ctx context.Context, req resource.CreateReques core.LogAndAddError( ctx, &resp.Diagnostics, - "Error creating Instance", + crateErr, "Instance creation waiting: returned id is nil", ) return @@ -262,8 +244,8 @@ func (r *instanceResource) Create(ctx context.Context, req resource.CreateReques core.LogAndAddError( ctx, &resp.Diagnostics, - "Error creating Instance", - fmt.Sprintf("Processing API payload: %v", err), + crateErr, + fmt.Sprintf("processing API payload: %v", err), ) return } @@ -292,8 +274,8 @@ func (r *instanceResource) Read(ctx context.Context, req resource.ReadRequest, r ctx = core.InitProviderContext(ctx) - projectId := identityData.ProjectID.ValueString() - region := identityData.Region.ValueString() + projectId := data.ProjectId.ValueString() + region := data.Region.ValueString() ctx = tflog.SetField(ctx, "project_id", projectId) ctx = tflog.SetField(ctx, "region", region) @@ -325,9 +307,6 @@ func (r *instanceResource) Read(ctx context.Context, req resource.ReadRequest, r return } - // Save updated data into Terraform state - resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) - identity := InstanceResourceIdentityModel{ ProjectID: types.StringValue(projectId), Region: types.StringValue(region), @@ -338,6 +317,9 @@ func (r *instanceResource) Read(ctx context.Context, req resource.ReadRequest, r return } + // Save updated data into Terraform state + resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) + tflog.Info(ctx, "sqlserverflexbeta.Instance read") } @@ -345,23 +327,15 @@ func (r *instanceResource) Update(ctx context.Context, req resource.UpdateReques var data sqlserverflexbetaResGen.InstanceModel updateInstanceError := "Error updating instance" - // Read Terraform prior state data into the model - resp.Diagnostics.Append(req.State.Get(ctx, &data)...) - if resp.Diagnostics.HasError() { - return - } - - // Read identity data - var identityData InstanceResourceIdentityModel - resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...) + resp.Diagnostics.Append(req.Plan.Get(ctx, &data)...) if resp.Diagnostics.HasError() { return } ctx = core.InitProviderContext(ctx) - projectId := identityData.ProjectID.ValueString() - region := identityData.Region.ValueString() + projectId := data.ProjectId.ValueString() + region := data.Region.ValueString() ctx = tflog.SetField(ctx, "project_id", projectId) ctx = tflog.SetField(ctx, "region", region) @@ -393,7 +367,11 @@ func (r *instanceResource) Update(ctx context.Context, req resource.UpdateReques ctx = core.LogResponse(ctx) - waitResp, err := wait.UpdateInstanceWaitHandler(ctx, r.client, projectId, instanceId, region).WaitWithContext(ctx) + waitResp, err := wait. + UpdateInstanceWaitHandler(ctx, r.client, projectId, instanceId, region). + SetSleepBeforeWait(15 * time.Second). + SetTimeout(45 * time.Minute). + WaitWithContext(ctx) if err != nil { core.LogAndAddError( ctx, @@ -416,6 +394,16 @@ func (r *instanceResource) Update(ctx context.Context, req resource.UpdateReques return } + identity := InstanceResourceIdentityModel{ + ProjectID: types.StringValue(projectId), + Region: types.StringValue(region), + InstanceID: types.StringValue(instanceId), + } + resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...) + if resp.Diagnostics.HasError() { + return + } + // Save updated data into Terraform state resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) @@ -457,7 +445,7 @@ func (r *instanceResource) Delete(ctx context.Context, req resource.DeleteReques ctx = core.LogResponse(ctx) - _, err = wait.DeleteInstanceWaitHandler(ctx, r.client, projectId, instanceId, region).WaitWithContext(ctx) + delResp, err := wait.DeleteInstanceWaitHandler(ctx, r.client, projectId, instanceId, region).WaitWithContext(ctx) if err != nil { core.LogAndAddError( ctx, @@ -468,6 +456,18 @@ func (r *instanceResource) Delete(ctx context.Context, req resource.DeleteReques return } + if delResp != nil { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + "Error deleting instance", + "wait handler returned non nil result", + ) + return + } + + resp.State.RemoveResource(ctx) + tflog.Info(ctx, "sqlserverflexbeta.Instance deleted") } diff --git a/stackit/internal/services/sqlserverflexbeta/instance/resource_test.go b/stackit/internal/services/sqlserverflexbeta/instance/resource_test.go new file mode 100644 index 00000000..64acf850 --- /dev/null +++ b/stackit/internal/services/sqlserverflexbeta/instance/resource_test.go @@ -0,0 +1,430 @@ +package sqlserverflexbeta + +import ( + "bytes" + "context" + "fmt" + "log" + "strings" + "testing" + "text/template" + + "github.com/hashicorp/terraform-plugin-testing/compare" + "github.com/hashicorp/terraform-plugin-testing/helper/acctest" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/knownvalue" + "github.com/hashicorp/terraform-plugin-testing/statecheck" + "github.com/hashicorp/terraform-plugin-testing/terraform" + "github.com/hashicorp/terraform-plugin-testing/tfjsonpath" + "github.com/hashicorp/terraform-plugin-testing/tfversion" + "github.com/stackitcloud/stackit-sdk-go/core/config" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/testutil" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" +) + +const resourceString = "stackitprivatepreview_sqlserverflexbeta_instance" + +var ( + validSingleFlavor = "4.16-Single" + kekKeyRingId = "" + kekKeyVersion = "" + kekKeySA = "" +) + +func TestMain(m *testing.M) { + resource.TestMain(m) +} + +func init() { + resource.AddTestSweepers(resourceString, &resource.Sweeper{ + Name: resourceString, + F: func(region string) error { + client, err := sharedClientForRegion(region) + if err != nil { + return fmt.Errorf("error getting client: %s", err) + } + conn := client.(*sqlserverflexbeta.APIClient) + + ctx := context.Background() + instances, err := conn.ListInstancesRequest(ctx, testutil.ProjectId, region).Execute() + if err != nil { + return fmt.Errorf("error getting instances: %s", err) + } + for _, instance := range instances.GetInstances() { + if strings.HasPrefix(instance.GetName(), "test-acc") { + err := conn.DeleteInstanceRequestExecute(ctx, testutil.ProjectId, region, instance.GetId()) + if err != nil { + log.Printf("error destroying %s during sweep: %s", instance.GetName(), err) + } + } + } + return nil + }, + }) +} + +// sharedClientForRegion returns a common provider client configured for the specified region +func sharedClientForRegion(region string) (any, error) { + providerData := core.ProviderData{} + if region != "" { + providerData.DefaultRegion = region + } + apiClientConfigOptions := []config.ConfigurationOption{ + config.WithCustomAuth(providerData.RoundTripper), + utils.UserAgentConfigOption(providerData.Version), + } + if providerData.SQLServerFlexCustomEndpoint != "" { + apiClientConfigOptions = append(apiClientConfigOptions, config.WithEndpoint(providerData.SQLServerFlexCustomEndpoint)) + } else { + apiClientConfigOptions = append(apiClientConfigOptions, config.WithRegion(providerData.GetRegion())) + } + apiClient, err := sqlserverflexbeta.NewAPIClient(apiClientConfigOptions...) + if err != nil { + return nil, err + } + return apiClient, nil +} + +func testAccPreCheck(t *testing.T) { + // TODO: if needed ... +} + +type resData struct { + WithEncryption bool + ServiceAccountFilePath string + ProjectID string + Region string + TfName string + Name string + FlavorID string + BackupSchedule string + RetentionDays uint32 + Replicas uint32 + PerformanceClass string + Size uint32 + Acl string + AccessScope string + Version string + KekKeyId string + KekKeyRingId string + KekKeyVersion uint8 + KekSaEmail string +} + +func getSingleExample() resData { + tmpName := acctest.RandomWithPrefix("tf-acc") + return resData{ + WithEncryption: false, + Region: testutil.Region, + ServiceAccountFilePath: testutil.ServiceAccountFile, + ProjectID: testutil.ProjectId, + Name: tmpName, + TfName: tmpName, + FlavorID: validSingleFlavor, + BackupSchedule: "0 0 * * *", + RetentionDays: 33, + PerformanceClass: "premium-perf2-stackit", + Size: 10, + Acl: "0.0.0.0/0", + AccessScope: "PUBLIC", + Version: "2022", + } +} + +func TestAccResourceExample_basic(t *testing.T) { + exData := getSingleExample() + t.Logf("[INFO] resource name: %s", exData.TfName) + + exBefore := testAccResourceExampleConfig(exData) + + updData := exData + // oldName := exData.Name + updData.Name = "newname" + updBefore := testAccResourceExampleConfig(updData) + + resName := fmt.Sprintf("%s.%s", resourceString, exData.TfName) + var resourceID string + + compareValuesSame := statecheck.CompareValue(compare.ValuesSame()) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + ProtoV6ProviderFactories: testutil.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.SkipBelow(tfversion.Version1_10_0), + }, + CheckDestroy: testAccCheckExampleResourceDestroy, + Steps: []resource.TestStep{ + // test create + { + Config: exBefore, + ConfigStateChecks: []statecheck.StateCheck{ + compareValuesSame.AddStateValue( + resName, + tfjsonpath.New("edition"), + ), + statecheck.ExpectKnownValue( + resName, + tfjsonpath.New("edition"), + knownvalue.StringExact("Standard"), + ), + //statecheck.ExpectSensitiveValue(resName, + // tfjsonpath.New("sensitive_string_attribute")), + }, + Check: resource.ComposeAggregateTestCheckFunc( + func(s *terraform.State) error { + t.Logf("[INFO] resourceID: %+v", resourceID) + return nil + }, + testAccGrabResourceID(resName, &resourceID), + func(s *terraform.State) error { + t.Logf("[INFO] resourceID: %s", resourceID) + return nil + }, + testCheckResourceExists(resName), + resource.TestCheckResourceAttrSet(resName, "id"), + //resource.TestCheckResourceAttr(resName, "id", resourceID), + resource.TestCheckResourceAttr(resName, "name", exData.Name), + ), + }, + // up to here we should see no plan drift + { + Config: exBefore, + PlanOnly: true, + ExpectNonEmptyPlan: false, + }, + // test update + { + Config: updBefore, + Check: resource.ComposeAggregateTestCheckFunc( + func(s *terraform.State) error { + t.Logf("[INFO] resourceID: %s", resourceID) + return nil + }, + testCheckResourceExists(resName), + resource.TestCheckResourceAttrSet(resName, "id"), + //resource.TestCheckResourceAttr(resName, "id", resourceID), + resource.TestCheckResourceAttr(resName, "name", updData.Name), + ), + }, + // check for plan drift after update + { + Config: exBefore, + PlanOnly: true, + ExpectNonEmptyPlan: false, + }, + //// Import test + //{ + // ResourceName: resName, + // ImportState: true, + // ImportStateVerify: true, + //}, + }, + }) +} + +func testAccCheckExampleResourceDestroy(state *terraform.State) error { + //// retrieve the connection established in Provider configuration + //conn := testAccProvider.Meta().(*ExampleClient) + + // loop through the resources in state, verifying each widget + // is destroyed + for _, rs := range state.RootModule().Resources { + if rs.Type != resourceString { + continue + } + + fmt.Println(rs.String()) + // rs.Primary.ID + + //// Retrieve our widget by referencing it's state ID for API lookup + //request := &example.DescribeWidgets{ + // IDs: []string{rs.Primary.ID}, + //} + // + //response, err := conn.DescribeWidgets(request) + //if err == nil { + // if len(response.Widgets) > 0 && *response.Widgets[0].ID == rs.Primary.ID { + // return fmt.Errorf("Widget (%s) still exists.", rs.Primary.ID) + // } + // + // return nil + //} + // + //// If the error is equivalent to 404 not found, the widget is destroyed. + //// otherwise return the error + //if !strings.Contains(err.Error(), "Widget not found") { + // return err + //} + } + + return nil +} + +func testAccResourceExampleConfig(data resData) string { + tpl := ` +resource "stackitprivatepreview_sqlserverflexbeta_instance" "{{ .TfName }}" { + project_id = "{{ .ProjectID }}" + name = "{{ .Name }}" + backup_schedule = "{{ .BackupSchedule }}" + retention_days = {{ .RetentionDays }} + flavor_id = "{{ .FlavorID }}" + storage = { + class = "{{ .PerformanceClass }}" + size = {{ .Size }} + } + network = { + acl = ["{{ .Acl }}"] + access_scope = "{{ .AccessScope }}" + } +{{ if .WithEncryption }} + encryption = { + kek_key_id = "{{ .KekKeyId }}" + kek_key_ring_id = "{{ .KekKeyRingId }}" + kek_key_version = {{ .KekKeyVersion }} + service_account = "{{ .KekSaEmail }}" + } +{{ end }} + version = "{{ .Version }}" +} +` + tmpl, err := template.New("").Parse(tpl) + if err != nil { + log.Fatalln(err) + } + buff := new(bytes.Buffer) + err = tmpl.Execute(buff, data) + if err != nil { + log.Fatalln(err) + } + + res := fmt.Sprintf(` +%[1]s + +%[2]s +`, + testutil.PostgresFlexProviderConfig(data.ServiceAccountFilePath), + buff.String(), + ) + + return res +} + +func testCheckResourceExists(resourceName string) resource.TestCheckFunc { + return func(s *terraform.State) error { + rs, ok := s.RootModule().Resources[resourceName] + if !ok { + return fmt.Errorf("resource not found: %s", resourceName) + } + + if rs.Primary.ID == "" { + return fmt.Errorf("resource ID not set") + } + + // Verify resource exists in the API + //client := testAccProvider.Meta().(*APIClient) + //_, err := client.GetResource(rs.Primary.ID) + //if err != nil { + // return fmt.Errorf("error fetching resource: %w", err) + //} + + return nil + } +} + +func testAccGrabResourceID(resourceName string, id *string) resource.TestCheckFunc { + return func(s *terraform.State) error { + rs, ok := s.RootModule().Resources[resourceName] + if !ok { + return fmt.Errorf("ressource not found: %s", resourceName) + } + if rs.Primary.ID == "" { + return fmt.Errorf("no ID in state for %s", resourceName) + } + *id = rs.Primary.ID + return nil + } +} + +//func setupMockServer() *httptest.Server { +// mux := http.NewServeMux() +// +// mux.HandleFunc("/api/resources", func(w http.ResponseWriter, r *http.Request) { +// switch r.Method { +// case http.MethodPost: +// w.WriteHeader(http.StatusCreated) +// json.NewEncoder(w).Encode(map[string]string{ +// "id": "mock-id-123", +// "name": "test-resource", +// }) +// case http.MethodGet: +// w.WriteHeader(http.StatusOK) +// json.NewEncoder(w).Encode([]map[string]string{}) +// } +// }) +// +// return httptest.NewServer(mux) +//} +// +//func TestUnitResourceCreate(t *testing.T) { +// server := setupMockServer() +// defer server.Close() +// +// // Configure provider to use mock server URL +// os.Setenv("API_ENDPOINT", server.URL) +// +// // Run unit tests against mock +//} + +// type postgresFlexClientMocked struct { +// returnError bool +// getFlavorsResp *postgresflex.GetFlavorsResponse +// } +// +// func (c *postgresFlexClientMocked) ListFlavorsExecute(_ context.Context, _, _ string) (*postgresflex.GetFlavorsResponse, error) { +// if c.returnError { +// return nil, fmt.Errorf("get flavors failed") +// } +// +// return c.getFlavorsResp, nil +// } + +//func TestNewInstanceResource(t *testing.T) { +// exData := resData{ +// Region: "eu01", +// ServiceAccountFilePath: sa_file, +// ProjectID: project_id, +// Name: "testRes", +// } +// resource.Test(t, resource.TestCase{ +// ProtoV6ProviderFactories: testutil.TestAccProtoV6ProviderFactories, +// Steps: []resource.TestStep{ +// { +// Config: testAccResourceEncryptionExampleConfig(exData), +// Check: resource.ComposeAggregateTestCheckFunc( +// resource.TestCheckResourceAttr("example_resource.test", "name", exData.Name), +// resource.TestCheckResourceAttrSet("example_resource.test", "id"), +// ), +// }, +// }, +// }) +// +// //tests := []struct { +// // name string +// // want resource.Resource +// //}{ +// // { +// // name: "create empty instance resource", +// // want: &instanceResource{}, +// // }, +// //} +// //for _, tt := range tests { +// // t.Run(tt.name, func(t *testing.T) { +// // if got := NewInstanceResource(); !reflect.DeepEqual(got, tt.want) { +// // t.Errorf("NewInstanceResource() = %v, want %v", got, tt.want) +// // } +// // }) +// //} +//} diff --git a/stackit/internal/wait/sqlserverflexbeta/wait.go b/stackit/internal/wait/sqlserverflexbeta/wait.go index bd813d82..36da00b1 100644 --- a/stackit/internal/wait/sqlserverflexbeta/wait.go +++ b/stackit/internal/wait/sqlserverflexbeta/wait.go @@ -96,7 +96,7 @@ func UpdateInstanceWaitHandler(ctx context.Context, a APIClientInterface, projec tflog.Info(ctx, "request is being handled", map[string]interface{}{ "status": *s.Status, }) - return false, nil, nil + return false, s, nil default: tflog.Info(ctx, "Wait (update) received unknown status", map[string]interface{}{ "instanceId": instanceId, @@ -105,17 +105,15 @@ func UpdateInstanceWaitHandler(ctx context.Context, a APIClientInterface, projec return false, s, nil } }) - handler.SetSleepBeforeWait(15 * time.Second) - handler.SetTimeout(45 * time.Minute) return handler } // DeleteInstanceWaitHandler will wait for instance deletion -func DeleteInstanceWaitHandler(ctx context.Context, a APIClientInterface, projectId, instanceId, region string) *wait.AsyncActionHandler[struct{}] { - handler := wait.New(func() (waitFinished bool, response *struct{}, err error) { - _, err = a.GetInstanceRequestExecute(ctx, projectId, region, instanceId) +func DeleteInstanceWaitHandler(ctx context.Context, a APIClientInterface, projectId, instanceId, region string) *wait.AsyncActionHandler[sqlserverflex.GetInstanceResponse] { + handler := wait.New(func() (waitFinished bool, response *sqlserverflex.GetInstanceResponse, err error) { + s, err := a.GetInstanceRequestExecute(ctx, projectId, region, instanceId) if err == nil { - return false, nil, nil + return false, s, nil } var oapiErr *oapierror.GenericOpenAPIError ok := errors.As(err, &oapiErr) @@ -127,7 +125,7 @@ func DeleteInstanceWaitHandler(ctx context.Context, a APIClientInterface, projec } return true, nil, nil }) - handler.SetTimeout(15 * time.Minute) + handler.SetTimeout(30 * time.Minute) return handler } From de019908d2b5419cf398f7d40b1c5d8a52f3bc10 Mon Sep 17 00:00:00 2001 From: Andre Harms Date: Tue, 10 Feb 2026 08:10:02 +0000 Subject: [PATCH 05/41] chore: changed and refactored providers (#36) ## Description relates to #1234 ## Checklist - [ ] Issue was linked above - [ ] Code format was applied: `make fmt` - [ ] Examples were added / adjusted (see `examples/` directory) - [x] Docs are up-to-date: `make generate-docs` (will be checked by CI) - [ ] Unit tests got implemented or updated - [ ] Acceptance tests got implemented or updated (see e.g. [here](https://github.com/stackitcloud/terraform-provider-stackit/blob/f5f99d170996b208672ae684b6da53420e369563/stackit/internal/services/dns/dns_acc_test.go)) - [x] Unit tests are passing: `make test` (will be checked by CI) - [x] No linter issues: `make lint` (will be checked by CI) Co-authored-by: Marcel S. Henselin Reviewed-on: https://tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pulls/36 Reviewed-by: Marcel_Henselin Co-authored-by: Andre Harms Co-committed-by: Andre Harms --- cmd/cmd/build/build.go | 76 +- .../data-sources/postgresflexalpha_flavors.md | 2 +- .../postgresflexalpha_instance.md | 2 +- .../sqlserverflexalpha_database.md | 2 +- .../sqlserverflexalpha_instance.md | 2 +- .../sqlserverflexbeta_instance.md | 2 +- .../postgresflexalpha/database/datasource.go | 196 +- .../database_data_source_gen.go | 69 + .../postgresflexalpha/database/functions.go | 10 + .../database/functions_test.go | 46 +- .../postgresflexalpha/database/mapper.go | 92 + .../postgresflexalpha/database/mapper_test.go | 240 +++ .../database/planModifiers.yaml | 35 + .../postgresflexalpha/database/resource.go | 443 ++-- .../database/resource_test.go | 232 -- .../resources_gen/database_resource_gen.go | 41 +- .../flavor/functions_test.go | 4 +- .../postgresflexalpha/flavors/datasource.go | 17 +- .../flavors_data_source_gen.go | 2 +- .../postgresflexalpha/instance/datasource.go | 27 +- .../instance_data_source_gen.go | 4 +- .../postgresflexalpha/instance/functions.go | 62 +- .../postgresflexalpha/instance/resource.go | 159 +- .../postgresflex_acc_test.go | 5 + .../postgresflexalpha/user/datasource.go | 210 +- .../postgresflexalpha/user/datasource_test.go | 146 -- .../datasources_gen/user_data_source_gen.go | 36 +- .../services/postgresflexalpha/user/mapper.go | 142 ++ .../postgresflexalpha/user/mapper_test.go | 569 +++++ .../postgresflexalpha/user/planModifiers.yaml | 55 + .../postgresflexalpha/user/resource.go | 561 ++--- .../postgresflexalpha/user/resource_test.go | 448 ---- .../user/resources_gen/user_resource_gen.go | 12 - .../sqlserverflexalpha/database/datasource.go | 31 +- .../database_data_source_gen.go | 4 +- .../database/planModifiers.yaml | 50 + .../sqlserverflexalpha/database/resource.go | 147 +- .../sqlserverflexalpha/flavor/datasource.go | 32 +- .../sqlserverflexalpha/flavors/datasource.go | 18 +- .../flavors_data_source_gen.go | 2 +- .../sqlserverflexalpha/instance/datasource.go | 198 +- .../instance_data_source_gen.go | 4 +- .../sqlserverflexalpha/instance/functions.go | 80 +- .../sqlserverflexalpha/instance/resource.go | 327 +-- .../sqlserverflexalpha/user/datasource.go | 16 +- .../user/datasource_test.go | 16 +- .../datasources_gen/user_data_source_gen.go | 2 +- .../user/planModifiers.yaml | 58 + .../sqlserverflexalpha/user/resource.go | 261 +-- .../sqlserverflexalpha/user/resource_test.go | 92 +- .../sqlserverflexbeta/database/datasource.go | 18 +- ...rce_fix.go => database_data_source_gen.go} | 0 .../database/planModifiers.yaml | 50 + .../sqlserverflexbeta/database/resource.go | 73 +- .../sqlserverflexbeta/flavors/datasource.go | 155 ++ .../flavors_data_source_gen.go | 1909 +++++++++++++++++ .../sqlserverflexbeta/instance/datasource.go | 15 +- .../instance_data_source_gen.go | 4 +- .../sqlserverflexbeta/instance/functions.go | 4 +- .../sqlserverflexbeta/instance/resource.go | 80 +- .../instance/resource_test.go | 7 + .../sqlserverflexbeta/user/datasource.go | 29 +- .../datasources_gen/user_data_source_gen.go | 2 +- .../sqlserverflexbeta/user/functions.go | 105 +- .../sqlserverflexbeta/user/planModifiers.yaml | 51 + .../sqlserverflexbeta/user/resource.go | 139 +- .../version_data_source_gen.go | 569 +++++ .../utils/planModifiers.go | 0 stackit/internal/utils/planModifiers_test.go | 224 ++ .../wait/sqlserverflexbeta/wait_test.go | 137 +- 70 files changed, 6250 insertions(+), 2608 deletions(-) create mode 100644 stackit/internal/services/postgresflexalpha/database/datasources_gen/database_data_source_gen.go create mode 100644 stackit/internal/services/postgresflexalpha/database/mapper.go create mode 100644 stackit/internal/services/postgresflexalpha/database/mapper_test.go create mode 100644 stackit/internal/services/postgresflexalpha/database/planModifiers.yaml delete mode 100644 stackit/internal/services/postgresflexalpha/database/resource_test.go delete mode 100644 stackit/internal/services/postgresflexalpha/user/datasource_test.go create mode 100644 stackit/internal/services/postgresflexalpha/user/mapper.go create mode 100644 stackit/internal/services/postgresflexalpha/user/mapper_test.go create mode 100644 stackit/internal/services/postgresflexalpha/user/planModifiers.yaml delete mode 100644 stackit/internal/services/postgresflexalpha/user/resource_test.go create mode 100644 stackit/internal/services/sqlserverflexalpha/database/planModifiers.yaml create mode 100644 stackit/internal/services/sqlserverflexalpha/user/planModifiers.yaml rename stackit/internal/services/sqlserverflexbeta/database/datasources_gen/{database_data_source_fix.go => database_data_source_gen.go} (100%) create mode 100644 stackit/internal/services/sqlserverflexbeta/database/planModifiers.yaml create mode 100644 stackit/internal/services/sqlserverflexbeta/flavors/datasource.go create mode 100644 stackit/internal/services/sqlserverflexbeta/flavors/datasources_gen/flavors_data_source_gen.go create mode 100644 stackit/internal/services/sqlserverflexbeta/user/planModifiers.yaml create mode 100644 stackit/internal/services/sqlserverflexbeta/versions/datasources_gen/version_data_source_gen.go rename stackit/internal/{services/postgresflexalpha => }/utils/planModifiers.go (100%) create mode 100644 stackit/internal/utils/planModifiers_test.go diff --git a/cmd/cmd/build/build.go b/cmd/cmd/build/build.go index f210b8d2..30fbf552 100644 --- a/cmd/cmd/build/build.go +++ b/cmd/cmd/build/build.go @@ -1,6 +1,7 @@ package build import ( + "bufio" "bytes" "errors" "fmt" @@ -509,7 +510,7 @@ func generateServiceFiles(rootDir, generatorDir string) error { oasFile := path.Join(generatorDir, "oas", fmt.Sprintf("%s%s.json", service.Name(), svcVersion.Name())) if _, oasErr := os.Stat(oasFile); os.IsNotExist(oasErr) { - slog.Warn(" coulc not find matching oas", "svc", service.Name(), "version", svcVersion.Name()) + slog.Warn(" could not find matching oas", "svc", service.Name(), "version", svcVersion.Name()) continue } @@ -648,6 +649,15 @@ func generateServiceFiles(rootDir, generatorDir string) error { return err } } + + tfAnoErr := handleTfTagForDatasourceFile( + path.Join(tgtFolder, fmt.Sprintf("%s_data_source_gen.go", resource)), + scName, + resource, + ) + if tfAnoErr != nil { + return tfAnoErr + } } } } @@ -655,6 +665,70 @@ func generateServiceFiles(rootDir, generatorDir string) error { return nil } +// handleTfTagForDatasourceFile replaces existing "id" with "stf_original_api_id" +func handleTfTagForDatasourceFile(filePath, service, resource string) error { + slog.Info(" handle terraform tag for datasource", "service", service, "resource", resource) + if !fileExists(filePath) { + slog.Warn(" could not find file, skipping", "path", filePath) + return nil + } + f, err := os.Open(filePath) + if err != nil { + return err + } + defer f.Close() + + tmp, err := os.CreateTemp("", "replace-*") + if err != nil { + return err + } + defer tmp.Close() + + sc := bufio.NewScanner(f) + for sc.Scan() { + resLine, err := handleLine(sc.Text()) + if err != nil { + return err + } + if _, err := io.WriteString(tmp, resLine+"\n"); err != nil { + return err + } + } + if scErr := sc.Err(); scErr != nil { + return scErr + } + + if err := tmp.Close(); err != nil { + return err + } + + if err := f.Close(); err != nil { + return err + } + + if err := os.Rename(tmp.Name(), filePath); err != nil { + log.Fatal(err) + } + return nil +} + +func handleLine(line string) (string, error) { + schemaRegex := regexp.MustCompile(`(\s+")(id)(": schema.[a-zA-Z0-9]+Attribute{)`) + + schemaMatches := schemaRegex.FindAllStringSubmatch(line, -1) + if schemaMatches != nil { + return fmt.Sprintf("%stf_original_api_id%s", schemaMatches[0][1], schemaMatches[0][3]), nil + } + + modelRegex := regexp.MustCompile(`(\s+Id\s+types.[a-zA-Z0-9]+\s+.tfsdk:")(id)(".)`) + modelMatches := modelRegex.FindAllStringSubmatch(line, -1) + if modelMatches != nil { + return fmt.Sprintf("%stf_original_api_id%s", modelMatches[0][1], modelMatches[0][3]), nil + } + + return line, nil +} + func checkCommands(commands []string) error { for _, commandName := range commands { if !commandExists(commandName) { diff --git a/docs/data-sources/postgresflexalpha_flavors.md b/docs/data-sources/postgresflexalpha_flavors.md index f90ae257..06645bb4 100644 --- a/docs/data-sources/postgresflexalpha_flavors.md +++ b/docs/data-sources/postgresflexalpha_flavors.md @@ -38,12 +38,12 @@ Read-Only: - `cpu` (Number) The cpu count of the instance. - `description` (String) The flavor description. -- `id` (String) The id of the instance flavor. - `max_gb` (Number) maximum storage which can be ordered for the flavor in Gigabyte. - `memory` (Number) The memory of the instance in Gibibyte. - `min_gb` (Number) minimum storage which is required to order in Gigabyte. - `node_type` (String) defines the nodeType it can be either single or replica - `storage_classes` (Attributes List) maximum storage which can be ordered for the flavor in Gigabyte. (see [below for nested schema](#nestedatt--flavors--storage_classes)) +- `tf_original_api_id` (String) The id of the instance flavor. ### Nested Schema for `flavors.storage_classes` diff --git a/docs/data-sources/postgresflexalpha_instance.md b/docs/data-sources/postgresflexalpha_instance.md index 54d887ea..466745a6 100644 --- a/docs/data-sources/postgresflexalpha_instance.md +++ b/docs/data-sources/postgresflexalpha_instance.md @@ -37,7 +37,6 @@ data "stackitprivatepreview_postgresflexalpha_instance" "example" { ⚠︝ **Note:** This feature is in private preview. Supplying this object is only permitted for enabled accounts. If your account does not have access, the request will be rejected. (see [below for nested schema](#nestedatt--encryption)) - `flavor_id` (String) The id of the instance flavor. -- `id` (String) The ID of the instance. - `is_deletable` (Boolean) Whether the instance can be deleted or not. - `name` (String) The name of the instance. - `network` (Attributes) The access configuration of the instance (see [below for nested schema](#nestedatt--network)) @@ -45,6 +44,7 @@ data "stackitprivatepreview_postgresflexalpha_instance" "example" { - `retention_days` (Number) How long backups are retained. The value can only be between 32 and 365 days. - `status` (String) The current status of the instance. - `storage` (Attributes) The object containing information about the storage size and class. (see [below for nested schema](#nestedatt--storage)) +- `tf_original_api_id` (String) The ID of the instance. - `version` (String) The Postgres version used for the instance. See [Versions Endpoint](/documentation/postgres-flex-service/version/v3alpha1#tag/Version) for supported version parameters. diff --git a/docs/data-sources/sqlserverflexalpha_database.md b/docs/data-sources/sqlserverflexalpha_database.md index 4aab99cc..5db648f4 100644 --- a/docs/data-sources/sqlserverflexalpha_database.md +++ b/docs/data-sources/sqlserverflexalpha_database.md @@ -26,6 +26,6 @@ description: |- - `collation_name` (String) The collation of the database. This database collation should match the *collation_name* of one of the collations given by the **Get database collation list** endpoint. - `compatibility_level` (Number) CompatibilityLevel of the Database. -- `id` (Number) The id of the database. - `name` (String) The name of the database. - `owner` (String) The owner of the database. +- `tf_original_api_id` (Number) The id of the database. diff --git a/docs/data-sources/sqlserverflexalpha_instance.md b/docs/data-sources/sqlserverflexalpha_instance.md index 134eb567..b05d7b8e 100644 --- a/docs/data-sources/sqlserverflexalpha_instance.md +++ b/docs/data-sources/sqlserverflexalpha_instance.md @@ -34,7 +34,6 @@ data "stackitprivatepreview_sqlserverflexalpha_instance" "example" { - `edition` (String) Edition of the MSSQL server instance - `encryption` (Attributes) this defines which key to use for storage encryption (see [below for nested schema](#nestedatt--encryption)) - `flavor_id` (String) The id of the instance flavor. -- `id` (String) The ID of the instance. - `is_deletable` (Boolean) Whether the instance can be deleted or not. - `name` (String) The name of the instance. - `network` (Attributes) The access configuration of the instance (see [below for nested schema](#nestedatt--network)) @@ -42,6 +41,7 @@ data "stackitprivatepreview_sqlserverflexalpha_instance" "example" { - `retention_days` (Number) The days for how long the backup files should be stored before cleaned up. 30 to 365 - `status` (String) - `storage` (Attributes) The object containing information about the storage size and class. (see [below for nested schema](#nestedatt--storage)) +- `tf_original_api_id` (String) The ID of the instance. - `version` (String) The sqlserver version used for the instance. diff --git a/docs/data-sources/sqlserverflexbeta_instance.md b/docs/data-sources/sqlserverflexbeta_instance.md index cc3645ef..431f95f1 100644 --- a/docs/data-sources/sqlserverflexbeta_instance.md +++ b/docs/data-sources/sqlserverflexbeta_instance.md @@ -34,7 +34,6 @@ data "stackitprivatepreview_sqlserverflexbeta_instance" "example" { - `edition` (String) Edition of the MSSQL server instance - `encryption` (Attributes) this defines which key to use for storage encryption (see [below for nested schema](#nestedatt--encryption)) - `flavor_id` (String) The id of the instance flavor. -- `id` (String) The ID of the instance. - `is_deletable` (Boolean) Whether the instance can be deleted or not. - `name` (String) The name of the instance. - `network` (Attributes) The access configuration of the instance (see [below for nested schema](#nestedatt--network)) @@ -42,6 +41,7 @@ data "stackitprivatepreview_sqlserverflexbeta_instance" "example" { - `retention_days` (Number) The days for how long the backup files should be stored before cleaned up. 30 to 365 - `status` (String) - `storage` (Attributes) The object containing information about the storage size and class. (see [below for nested schema](#nestedatt--storage)) +- `tf_original_api_id` (String) The ID of the instance. - `version` (String) The sqlserver version used for the instance. diff --git a/stackit/internal/services/postgresflexalpha/database/datasource.go b/stackit/internal/services/postgresflexalpha/database/datasource.go index 36fc5333..4a89be17 100644 --- a/stackit/internal/services/postgresflexalpha/database/datasource.go +++ b/stackit/internal/services/postgresflexalpha/database/datasource.go @@ -5,18 +5,17 @@ import ( "fmt" "net/http" - "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator" - "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" - postgresflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/utils" - "github.com/hashicorp/terraform-plugin-framework/datasource" - "github.com/hashicorp/terraform-plugin-framework/schema/validator" - "github.com/hashicorp/terraform-plugin-log/tflog" - "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" - "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" - "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/validate" - "github.com/hashicorp/terraform-plugin-framework/datasource/schema" + "github.com/hashicorp/terraform-plugin-framework/diag" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/hashicorp/terraform-plugin-log/tflog" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" + postgresflexalpha2 "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/database/datasources_gen" + postgresflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/utils" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha" ) @@ -30,6 +29,12 @@ func NewDatabaseDataSource() datasource.DataSource { return &databaseDataSource{} } +// dataSourceModel maps the data source schema data. +type dataSourceModel struct { + postgresflexalpha2.DatabaseModel + TerraformID types.String `tfsdk:"id"` +} + // databaseDataSource is the data source implementation. type databaseDataSource struct { client *postgresflexalpha.APIClient @@ -66,132 +71,46 @@ func (r *databaseDataSource) Configure( } // Schema defines the schema for the data source. -func (r *databaseDataSource) Schema(_ context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) { - descriptions := map[string]string{ - "main": "Postgres Flex database resource schema. Must have a `region` specified in the provider configuration.", - "id": "Terraform's internal resource ID. It is structured as \"`project_id`,`region`,`instance_id`,`database_id`\".", - "database_id": "Database ID.", - "instance_id": "ID of the Postgres Flex instance.", - "project_id": "STACKIT project ID to which the instance is associated.", - "name": "Database name.", - "owner": "Username of the database owner.", - "region": "The resource region. If not defined, the provider region is used.", +func (r *databaseDataSource) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) { + + s := postgresflexalpha2.DatabaseDataSourceSchema(ctx) + s.Attributes["id"] = schema.StringAttribute{ + Description: "Terraform's internal resource ID. It is structured as \\\"`project_id`,`region`,`instance_id`," + + "`database_id`\\\".\",", + Computed: true, } - resp.Schema = schema.Schema{ - Description: descriptions["main"], - Attributes: map[string]schema.Attribute{ - "id": schema.StringAttribute{ - Description: descriptions["id"], - Computed: true, - }, - "database_id": schema.Int64Attribute{ - Description: descriptions["database_id"], - Optional: true, - Computed: true, - }, - "instance_id": schema.StringAttribute{ - Description: descriptions["instance_id"], - Required: true, - Validators: []validator.String{ - validate.UUID(), - validate.NoSeparator(), - }, - }, - "project_id": schema.StringAttribute{ - Description: descriptions["project_id"], - Required: true, - Validators: []validator.String{ - validate.UUID(), - validate.NoSeparator(), - }, - }, - "name": schema.StringAttribute{ - Description: descriptions["name"], - Optional: true, - Computed: true, - Validators: []validator.String{ - stringvalidator.LengthAtLeast(1), - }, - }, - "owner": schema.StringAttribute{ - Description: descriptions["owner"], - Computed: true, - }, - "region": schema.StringAttribute{ - // the region cannot be found, so it has to be passed - Optional: true, - Description: descriptions["region"], - }, - }, - } + resp.Schema = s } -// Read refreshes the Terraform state with the latest data. +// Read fetches the data for the data source. func (r *databaseDataSource) Read( ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse, ) { // nolint:gocritic // function signature required by Terraform - var model Model + var model dataSourceModel diags := req.Config.Get(ctx, &model) resp.Diagnostics.Append(diags...) if resp.Diagnostics.HasError() { return } - // validation for exactly one of database_id or name - isIdSet := !model.DatabaseId.IsNull() && !model.DatabaseId.IsUnknown() - isNameSet := !model.Name.IsNull() && !model.Name.IsUnknown() - - if (isIdSet && isNameSet) || (!isIdSet && !isNameSet) { - core.LogAndAddError( - ctx, &resp.Diagnostics, - "Invalid configuration", "Exactly one of 'database_id' or 'name' must be specified.", - ) - return - } - ctx = core.InitProviderContext(ctx) projectId := model.ProjectId.ValueString() instanceId := model.InstanceId.ValueString() - databaseId := model.DatabaseId.ValueInt64() region := r.providerData.GetRegionWithOverride(model.Region) ctx = tflog.SetField(ctx, "project_id", projectId) ctx = tflog.SetField(ctx, "instance_id", instanceId) - ctx = tflog.SetField(ctx, "database_id", databaseId) ctx = tflog.SetField(ctx, "region", region) - var databaseResp *postgresflexalpha.ListDatabase - var err error - - if isIdSet { - databaseId := model.DatabaseId.ValueInt64() - ctx = tflog.SetField(ctx, "database_id", databaseId) - databaseResp, err = getDatabaseById(ctx, r.client, projectId, region, instanceId, databaseId) - } else { - databaseName := model.Name.ValueString() - ctx = tflog.SetField(ctx, "name", databaseName) - databaseResp, err = getDatabaseByName(ctx, r.client, projectId, region, instanceId, databaseName) + databaseResp, err := r.getDatabaseByNameOrID(ctx, &model, projectId, region, instanceId, &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return } - if err != nil { - utils.LogError( - ctx, - &resp.Diagnostics, - err, - "Reading database", - fmt.Sprintf( - "Database with ID %q or instance with ID %q does not exist in project %q.", - databaseId, - instanceId, - projectId, - ), - map[int]string{ - http.StatusForbidden: fmt.Sprintf("Project with ID %q not found or forbidden access", projectId), - }, - ) + handleReadError(ctx, &resp.Diagnostics, err, projectId, instanceId) resp.State.RemoveResource(ctx) return } @@ -218,3 +137,60 @@ func (r *databaseDataSource) Read( } tflog.Info(ctx, "Postgres Flex database read") } + +// getDatabaseByNameOrID retrieves a single database by ensuring either a unique ID or name is provided. +func (r *databaseDataSource) getDatabaseByNameOrID( + ctx context.Context, + model *dataSourceModel, + projectId, region, instanceId string, + diags *diag.Diagnostics, +) (*postgresflexalpha.ListDatabase, error) { + isIdSet := !model.DatabaseId.IsNull() && !model.DatabaseId.IsUnknown() + isNameSet := !model.Name.IsNull() && !model.Name.IsUnknown() + + if (isIdSet && isNameSet) || (!isIdSet && !isNameSet) { + diags.AddError( + "Invalid configuration", + "Exactly one of 'id' or 'name' must be specified.", + ) + return nil, nil + } + + if isIdSet { + databaseId := model.DatabaseId.ValueInt64() + ctx = tflog.SetField(ctx, "database_id", databaseId) + return getDatabaseById(ctx, r.client, projectId, region, instanceId, databaseId) + } + + databaseName := model.Name.ValueString() + ctx = tflog.SetField(ctx, "name", databaseName) + return getDatabaseByName(ctx, r.client, projectId, region, instanceId, databaseName) +} + +// handleReadError centralizes API error handling for the Read operation. +func handleReadError(ctx context.Context, diags *diag.Diagnostics, err error, projectId, instanceId string) { + utils.LogError( + ctx, + diags, + err, + "Reading database", + fmt.Sprintf( + "Could not retrieve database for instance %q in project %q.", + instanceId, + projectId, + ), + map[int]string{ + http.StatusBadRequest: fmt.Sprintf( + "Invalid request parameters for project %q and instance %q.", + projectId, + instanceId, + ), + http.StatusNotFound: fmt.Sprintf( + "Database, instance %q, or project %q not found.", + instanceId, + projectId, + ), + http.StatusForbidden: fmt.Sprintf("Forbidden access to project %q.", projectId), + }, + ) +} diff --git a/stackit/internal/services/postgresflexalpha/database/datasources_gen/database_data_source_gen.go b/stackit/internal/services/postgresflexalpha/database/datasources_gen/database_data_source_gen.go new file mode 100644 index 00000000..d5683a6c --- /dev/null +++ b/stackit/internal/services/postgresflexalpha/database/datasources_gen/database_data_source_gen.go @@ -0,0 +1,69 @@ +// Code generated by terraform-plugin-framework-generator DO NOT EDIT. + +package postgresflexalpha + +import ( + "context" + "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator" + "github.com/hashicorp/terraform-plugin-framework/schema/validator" + "github.com/hashicorp/terraform-plugin-framework/types" + + "github.com/hashicorp/terraform-plugin-framework/datasource/schema" +) + +func DatabaseDataSourceSchema(ctx context.Context) schema.Schema { + return schema.Schema{ + Attributes: map[string]schema.Attribute{ + "database_id": schema.Int64Attribute{ + Required: true, + Description: "The ID of the database.", + MarkdownDescription: "The ID of the database.", + }, + "tf_original_api_id": schema.Int64Attribute{ + Computed: true, + Description: "The id of the database.", + MarkdownDescription: "The id of the database.", + }, + "instance_id": schema.StringAttribute{ + Required: true, + Description: "The ID of the instance.", + MarkdownDescription: "The ID of the instance.", + }, + "name": schema.StringAttribute{ + Computed: true, + Description: "The name of the database.", + MarkdownDescription: "The name of the database.", + }, + "owner": schema.StringAttribute{ + Computed: true, + Description: "The owner of the database.", + MarkdownDescription: "The owner of the database.", + }, + "project_id": schema.StringAttribute{ + Required: true, + Description: "The STACKIT project ID.", + MarkdownDescription: "The STACKIT project ID.", + }, + "region": schema.StringAttribute{ + Required: true, + Description: "The region which should be addressed", + MarkdownDescription: "The region which should be addressed", + Validators: []validator.String{ + stringvalidator.OneOf( + "eu01", + ), + }, + }, + }, + } +} + +type DatabaseModel struct { + DatabaseId types.Int64 `tfsdk:"database_id"` + Id types.Int64 `tfsdk:"tf_original_api_id"` + InstanceId types.String `tfsdk:"instance_id"` + Name types.String `tfsdk:"name"` + Owner types.String `tfsdk:"owner"` + ProjectId types.String `tfsdk:"project_id"` + Region types.String `tfsdk:"region"` +} diff --git a/stackit/internal/services/postgresflexalpha/database/functions.go b/stackit/internal/services/postgresflexalpha/database/functions.go index b1c30bb9..4496faa1 100644 --- a/stackit/internal/services/postgresflexalpha/database/functions.go +++ b/stackit/internal/services/postgresflexalpha/database/functions.go @@ -3,6 +3,7 @@ package postgresflexalpha import ( "context" "fmt" + "strings" postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha" ) @@ -79,3 +80,12 @@ func getDatabase( return nil, fmt.Errorf("database not found for instance %s", instanceId) } + +// cleanString removes leading and trailing quotes which are sometimes returned by the API. +func cleanString(s *string) *string { + if s == nil { + return nil + } + res := strings.Trim(*s, "\"") + return &res +} diff --git a/stackit/internal/services/postgresflexalpha/database/functions_test.go b/stackit/internal/services/postgresflexalpha/database/functions_test.go index 7ec941db..9f0b47fd 100644 --- a/stackit/internal/services/postgresflexalpha/database/functions_test.go +++ b/stackit/internal/services/postgresflexalpha/database/functions_test.go @@ -4,6 +4,7 @@ import ( "context" "testing" + "github.com/google/go-cmp/cmp" "github.com/stackitcloud/stackit-sdk-go/core/utils" postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha" ) @@ -12,8 +13,8 @@ type mockRequest struct { executeFunc func() (*postgresflex.ListDatabasesResponse, error) } -func (m *mockRequest) Page(_ int64) postgresflex.ApiListDatabasesRequestRequest { return m } -func (m *mockRequest) Size(_ int64) postgresflex.ApiListDatabasesRequestRequest { return m } +func (m *mockRequest) Page(_ int32) postgresflex.ApiListDatabasesRequestRequest { return m } +func (m *mockRequest) Size(_ int32) postgresflex.ApiListDatabasesRequestRequest { return m } func (m *mockRequest) Sort(_ postgresflex.DatabaseSort) postgresflex.ApiListDatabasesRequestRequest { return m } @@ -176,21 +177,56 @@ func TestGetDatabase(t *testing.T) { } if (errDB != nil) != tt.wantErr { - t.Errorf("getDatabase() error = %v, wantErr %v", errDB, tt.wantErr) + t.Errorf("getDatabaseByNameOrID() error = %v, wantErr %v", errDB, tt.wantErr) return } if !tt.wantErr && tt.wantDbName != "" && actual != nil { if *actual.Name != tt.wantDbName { - t.Errorf("getDatabase() got name = %v, want %v", *actual.Name, tt.wantDbName) + t.Errorf("getDatabaseByNameOrID() got name = %v, want %v", *actual.Name, tt.wantDbName) } } if !tt.wantErr && tt.wantDbId != 0 && actual != nil { if *actual.Id != tt.wantDbId { - t.Errorf("getDatabase() got id = %v, want %v", *actual.Id, tt.wantDbId) + t.Errorf("getDatabaseByNameOrID() got id = %v, want %v", *actual.Id, tt.wantDbId) } } }, ) } } + +func TestCleanString(t *testing.T) { + testcases := []struct { + name string + given *string + expected *string + }{ + { + name: "should remove quotes", + given: utils.Ptr("\"quoted\""), + expected: utils.Ptr("quoted"), + }, + { + name: "should handle nil", + given: nil, + expected: nil, + }, + { + name: "should not change unquoted string", + given: utils.Ptr("unquoted"), + expected: utils.Ptr("unquoted"), + }, + } + + for _, tc := range testcases { + t.Run( + tc.name, func(t *testing.T) { + actual := cleanString(tc.given) + if diff := cmp.Diff(tc.expected, actual); diff != "" { + t.Errorf("string mismatch (-want +got):\n%s", diff) + } + }, + ) + } +} diff --git a/stackit/internal/services/postgresflexalpha/database/mapper.go b/stackit/internal/services/postgresflexalpha/database/mapper.go new file mode 100644 index 00000000..5785f4b7 --- /dev/null +++ b/stackit/internal/services/postgresflexalpha/database/mapper.go @@ -0,0 +1,92 @@ +package postgresflexalpha + +import ( + "fmt" + "strconv" + + "github.com/hashicorp/terraform-plugin-framework/types" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" +) + +// mapFields maps fields from a ListDatabase API response to a resourceModel for the data source. +func mapFields( + source *postgresflexalpha.ListDatabase, + model *dataSourceModel, + region string, +) error { + if source == nil { + return fmt.Errorf("response is nil") + } + if source.Id == nil || *source.Id == 0 { + return fmt.Errorf("id not present") + } + if model == nil { + return fmt.Errorf("model given is nil") + } + + var databaseId int64 + if model.DatabaseId.ValueInt64() != 0 { + databaseId = model.DatabaseId.ValueInt64() + } else if source.Id != nil { + databaseId = *source.Id + } else { + return fmt.Errorf("database id not present") + } + + model.Id = types.Int64Value(databaseId) + model.DatabaseId = types.Int64Value(databaseId) + model.Name = types.StringValue(source.GetName()) + model.Owner = types.StringPointerValue(cleanString(source.Owner)) + model.Region = types.StringValue(region) + model.ProjectId = types.StringValue(model.ProjectId.ValueString()) + model.InstanceId = types.StringValue(model.InstanceId.ValueString()) + model.TerraformID = utils.BuildInternalTerraformId( + model.ProjectId.ValueString(), + region, + model.InstanceId.ValueString(), + strconv.FormatInt(databaseId, 10), + ) + + return nil +} + +// mapResourceFields maps fields from a ListDatabase API response to a resourceModel for the resource. +func mapResourceFields(source *postgresflexalpha.ListDatabase, model *resourceModel) error { + if source == nil { + return fmt.Errorf("response is nil") + } + if source.Id == nil || *source.Id == 0 { + return fmt.Errorf("id not present") + } + if model == nil { + return fmt.Errorf("model input is nil") + } + + var databaseId int64 + if model.Id.ValueInt64() != 0 { + databaseId = model.Id.ValueInt64() + } else if source.Id != nil { + databaseId = *source.Id + } else { + return fmt.Errorf("database id not present") + } + + model.Id = types.Int64Value(databaseId) + model.DatabaseId = types.Int64Value(databaseId) + model.Name = types.StringValue(source.GetName()) + model.Owner = types.StringPointerValue(cleanString(source.Owner)) + return nil +} + +// toCreatePayload converts the resource model to an API create payload. +func toCreatePayload(model *resourceModel) (*postgresflexalpha.CreateDatabaseRequestPayload, error) { + if model == nil { + return nil, fmt.Errorf("nil model") + } + + return &postgresflexalpha.CreateDatabaseRequestPayload{ + Name: model.Name.ValueStringPointer(), + Owner: model.Owner.ValueStringPointer(), + }, nil +} diff --git a/stackit/internal/services/postgresflexalpha/database/mapper_test.go b/stackit/internal/services/postgresflexalpha/database/mapper_test.go new file mode 100644 index 00000000..a2f18c12 --- /dev/null +++ b/stackit/internal/services/postgresflexalpha/database/mapper_test.go @@ -0,0 +1,240 @@ +package postgresflexalpha + +import ( + "testing" + + "github.com/google/go-cmp/cmp" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/stackitcloud/stackit-sdk-go/core/utils" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha" + datasource "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/database/datasources_gen" +) + +func TestMapFields(t *testing.T) { + type given struct { + source *postgresflexalpha.ListDatabase + model *dataSourceModel + region string + } + type expected struct { + model *dataSourceModel + err bool + } + + testcases := []struct { + name string + given given + expected expected + }{ + { + name: "should map fields correctly", + given: given{ + source: &postgresflexalpha.ListDatabase{ + Id: utils.Ptr(int64(1)), + Name: utils.Ptr("my-db"), + Owner: utils.Ptr("\"my-owner\""), + }, + model: &dataSourceModel{}, + region: "eu01", + }, + expected: expected{ + model: &dataSourceModel{ + DatabaseModel: datasource.DatabaseModel{ + Id: types.Int64Value(1), + Name: types.StringValue("my-db"), + Owner: types.StringValue("my-owner"), + Region: types.StringValue("eu01"), + DatabaseId: types.Int64Value(1), + InstanceId: types.StringValue("my-instance"), + ProjectId: types.StringValue("my-project"), + }, + TerraformID: types.StringValue("my-project,eu01,my-instance,1"), + }, + }, + }, + { + name: "should preserve existing model ID", + given: given{ + source: &postgresflexalpha.ListDatabase{ + Id: utils.Ptr(int64(1)), + Name: utils.Ptr("my-db"), + }, + model: &dataSourceModel{ + DatabaseModel: datasource.DatabaseModel{ + Id: types.Int64Value(1), + ProjectId: types.StringValue("my-project"), + InstanceId: types.StringValue("my-instance"), + }, + }, + region: "eu01", + }, + expected: expected{ + model: &dataSourceModel{ + DatabaseModel: datasource.DatabaseModel{ + Id: types.Int64Value(1), + Name: types.StringValue("my-db"), + Owner: types.StringNull(), DatabaseId: types.Int64Value(1), + Region: types.StringValue("eu01"), + InstanceId: types.StringValue("my-instance"), + ProjectId: types.StringValue("my-project"), + }, + TerraformID: types.StringValue("my-project,eu01,my-instance,1"), + }, + }, + }, + { + name: "should fail on nil source", + given: given{ + source: nil, + model: &dataSourceModel{}, + }, + expected: expected{err: true}, + }, + { + name: "should fail on nil source ID", + given: given{ + source: &postgresflexalpha.ListDatabase{Id: nil}, + model: &dataSourceModel{}, + }, + expected: expected{err: true}, + }, + { + name: "should fail on nil model", + given: given{ + source: &postgresflexalpha.ListDatabase{Id: utils.Ptr(int64(1))}, + model: nil, + }, + expected: expected{err: true}, + }, + } + + for _, tc := range testcases { + t.Run( + tc.name, func(t *testing.T) { + err := mapFields(tc.given.source, tc.given.model, tc.given.region) + if (err != nil) != tc.expected.err { + t.Fatalf("expected error: %v, got: %v", tc.expected.err, err) + } + if err == nil { + if diff := cmp.Diff(tc.expected.model, tc.given.model); diff != "" { + t.Errorf("model mismatch (-want +got):\n%s", diff) + } + } + }, + ) + } +} + +func TestMapResourceFields(t *testing.T) { + type given struct { + source *postgresflexalpha.ListDatabase + model *resourceModel + } + type expected struct { + model *resourceModel + err bool + } + + testcases := []struct { + name string + given given + expected expected + }{ + { + name: "should map fields correctly", + given: given{ + source: &postgresflexalpha.ListDatabase{ + Id: utils.Ptr(int64(1)), + Name: utils.Ptr("my-db"), + Owner: utils.Ptr("\"my-owner\""), + }, + model: &resourceModel{}, + }, + expected: expected{ + model: &resourceModel{ + Id: types.Int64Value(1), + Name: types.StringValue("my-db"), + Owner: types.StringValue("my-owner"), + DatabaseId: types.Int64Value(1), + }, + }, + }, + { + name: "should fail on nil source", + given: given{ + source: nil, + model: &resourceModel{}, + }, + expected: expected{err: true}, + }, + } + + for _, tc := range testcases { + t.Run( + tc.name, func(t *testing.T) { + err := mapResourceFields(tc.given.source, tc.given.model) + if (err != nil) != tc.expected.err { + t.Fatalf("expected error: %v, got: %v", tc.expected.err, err) + } + if err == nil { + if diff := cmp.Diff(tc.expected.model, tc.given.model); diff != "" { + t.Errorf("model mismatch (-want +got):\n%s", diff) + } + } + }, + ) + } +} + +func TestToCreatePayload(t *testing.T) { + type given struct { + model *resourceModel + } + type expected struct { + payload *postgresflexalpha.CreateDatabaseRequestPayload + err bool + } + + testcases := []struct { + name string + given given + expected expected + }{ + { + name: "should convert model to payload", + given: given{ + model: &resourceModel{ + Name: types.StringValue("my-db"), + Owner: types.StringValue("my-owner"), + }, + }, + expected: expected{ + payload: &postgresflexalpha.CreateDatabaseRequestPayload{ + Name: utils.Ptr("my-db"), + Owner: utils.Ptr("my-owner"), + }, + }, + }, + { + name: "should fail on nil model", + given: given{model: nil}, + expected: expected{err: true}, + }, + } + + for _, tc := range testcases { + t.Run( + tc.name, func(t *testing.T) { + actual, err := toCreatePayload(tc.given.model) + if (err != nil) != tc.expected.err { + t.Fatalf("expected error: %v, got: %v", tc.expected.err, err) + } + if err == nil { + if diff := cmp.Diff(tc.expected.payload, actual); diff != "" { + t.Errorf("payload mismatch (-want +got):\n%s", diff) + } + } + }, + ) + } +} diff --git a/stackit/internal/services/postgresflexalpha/database/planModifiers.yaml b/stackit/internal/services/postgresflexalpha/database/planModifiers.yaml new file mode 100644 index 00000000..f3f70aeb --- /dev/null +++ b/stackit/internal/services/postgresflexalpha/database/planModifiers.yaml @@ -0,0 +1,35 @@ +fields: + - name: 'id' + modifiers: + - 'UseStateForUnknown' + + - name: 'database_id' + modifiers: + - 'UseStateForUnknown' + validators: + - validate.NoSeparator + - validate.UUID + + - name: 'instance_id' + validators: + - validate.NoSeparator + - validate.UUID + modifiers: + - 'RequiresReplace' + - 'UseStateForUnknown' + + - name: 'project_id' + modifiers: + - 'RequiresReplace' + - 'UseStateForUnknown' + validators: + - validate.NoSeparator + - validate.UUID + + - name: 'name' + validators: + - validate.NoSeparator + + - name: 'region' + modifiers: + - 'RequiresReplace' diff --git a/stackit/internal/services/postgresflexalpha/database/resource.go b/stackit/internal/services/postgresflexalpha/database/resource.go index 67d1e477..64c62e70 100644 --- a/stackit/internal/services/postgresflexalpha/database/resource.go +++ b/stackit/internal/services/postgresflexalpha/database/resource.go @@ -2,70 +2,73 @@ package postgresflexalpha import ( "context" + _ "embed" "errors" "fmt" "math" "net/http" - "regexp" "strconv" "strings" - "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator" "github.com/hashicorp/terraform-plugin-framework/path" "github.com/hashicorp/terraform-plugin-framework/resource" - "github.com/hashicorp/terraform-plugin-framework/resource/schema" - "github.com/hashicorp/terraform-plugin-framework/resource/schema/int64planmodifier" - "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier" - "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier" - "github.com/hashicorp/terraform-plugin-framework/schema/validator" + "github.com/hashicorp/terraform-plugin-framework/resource/identityschema" "github.com/hashicorp/terraform-plugin-framework/types" "github.com/hashicorp/terraform-plugin-log/tflog" "github.com/stackitcloud/stackit-sdk-go/core/oapierror" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" + postgresflexalpha2 "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/database/resources_gen" postgresflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/utils" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" - "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/validate" ) -// Ensure the implementation satisfies the expected interfaces. var ( + // Ensure the implementation satisfies the expected interfaces. _ resource.Resource = &databaseResource{} _ resource.ResourceWithConfigure = &databaseResource{} _ resource.ResourceWithImportState = &databaseResource{} _ resource.ResourceWithModifyPlan = &databaseResource{} -) + _ resource.ResourceWithIdentity = &databaseResource{} -type Model struct { - Id types.String `tfsdk:"id"` // needed by TF - DatabaseId types.Int64 `tfsdk:"database_id"` - InstanceId types.String `tfsdk:"instance_id"` - ProjectId types.String `tfsdk:"project_id"` - Name types.String `tfsdk:"name"` - Owner types.String `tfsdk:"owner"` - Region types.String `tfsdk:"region"` -} + // Define errors + errDatabaseNotFound = errors.New("database not found") + + // Error message constants + extractErrorSummary = "extracting failed" + extractErrorMessage = "Extracting identity data: %v" +) // NewDatabaseResource is a helper function to simplify the provider implementation. func NewDatabaseResource() resource.Resource { return &databaseResource{} } +// resourceModel describes the resource data model. +type resourceModel = postgresflexalpha2.DatabaseModel + +// DatabaseResourceIdentityModel describes the resource's identity attributes. +type DatabaseResourceIdentityModel struct { + ProjectID types.String `tfsdk:"project_id"` + Region types.String `tfsdk:"region"` + InstanceID types.String `tfsdk:"instance_id"` + DatabaseID types.Int64 `tfsdk:"database_id"` +} + // databaseResource is the resource implementation. type databaseResource struct { client *postgresflexalpha.APIClient providerData core.ProviderData } -// ModifyPlan implements resource.ResourceWithModifyPlan. -// Use the modifier to set the effective region in the current plan. +// ModifyPlan adjusts the plan to set the correct region. func (r *databaseResource) ModifyPlan( ctx context.Context, req resource.ModifyPlanRequest, resp *resource.ModifyPlanResponse, ) { // nolint:gocritic // function signature required by Terraform - var configModel Model + var configModel resourceModel // skip initial empty configuration to avoid follow-up errors if req.Config.Raw.IsNull() { return @@ -75,7 +78,7 @@ func (r *databaseResource) ModifyPlan( return } - var planModel Model + var planModel resourceModel resp.Diagnostics.Append(req.Plan.Get(ctx, &planModel)...) if resp.Diagnostics.HasError() { return @@ -117,85 +120,46 @@ func (r *databaseResource) Configure( tflog.Info(ctx, "Postgres Flex database client configured") } +//go:embed planModifiers.yaml +var modifiersFileByte []byte + // Schema defines the schema for the resource. -func (r *databaseResource) Schema(_ context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) { - descriptions := map[string]string{ - "main": "Postgres Flex database resource schema. Must have a `region` specified in the provider configuration.", - "id": "Terraform's internal resource ID. It is structured as \"`project_id`,`region`,`instance_id`,`database_id`\".", - "database_id": "Database ID.", - "instance_id": "ID of the Postgres Flex instance.", - "project_id": "STACKIT project ID to which the instance is associated.", - "name": "Database name.", - "owner": "Username of the database owner.", - "region": "The resource region. If not defined, the provider region is used.", +func (r *databaseResource) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) { + s := postgresflexalpha2.DatabaseResourceSchema(ctx) + + fields, err := utils.ReadModifiersConfig(modifiersFileByte) + if err != nil { + resp.Diagnostics.AddError("error during read modifiers config file", err.Error()) + return } - resp.Schema = schema.Schema{ - Description: descriptions["main"], - Attributes: map[string]schema.Attribute{ - "id": schema.StringAttribute{ - Description: descriptions["id"], - Computed: true, - PlanModifiers: []planmodifier.String{ - stringplanmodifier.UseStateForUnknown(), - }, + err = utils.AddPlanModifiersToResourceSchema(fields, &s) + if err != nil { + resp.Diagnostics.AddError("error adding plan modifiers", err.Error()) + return + } + resp.Schema = s +} + +// IdentitySchema defines the schema for the resource's identity attributes. +func (r *databaseResource) IdentitySchema( + _ context.Context, + _ resource.IdentitySchemaRequest, + response *resource.IdentitySchemaResponse, +) { + response.IdentitySchema = identityschema.Schema{ + Attributes: map[string]identityschema.Attribute{ + "project_id": identityschema.StringAttribute{ + RequiredForImport: true, }, - "database_id": schema.Int64Attribute{ - Description: descriptions["database_id"], - Computed: true, - PlanModifiers: []planmodifier.Int64{ - int64planmodifier.UseStateForUnknown(), - }, - Validators: []validator.Int64{}, + "region": identityschema.StringAttribute{ + RequiredForImport: true, }, - "instance_id": schema.StringAttribute{ - Description: descriptions["instance_id"], - Required: true, - PlanModifiers: []planmodifier.String{ - stringplanmodifier.RequiresReplace(), - stringplanmodifier.UseStateForUnknown(), - }, - Validators: []validator.String{ - validate.UUID(), - validate.NoSeparator(), - }, + "instance_id": identityschema.StringAttribute{ + RequiredForImport: true, }, - "project_id": schema.StringAttribute{ - Description: descriptions["project_id"], - Required: true, - PlanModifiers: []planmodifier.String{ - stringplanmodifier.RequiresReplace(), - stringplanmodifier.UseStateForUnknown(), - }, - Validators: []validator.String{ - validate.UUID(), - validate.NoSeparator(), - }, - }, - "name": schema.StringAttribute{ - Description: descriptions["name"], - Required: true, - PlanModifiers: []planmodifier.String{}, - Validators: []validator.String{ - stringvalidator.RegexMatches( - regexp.MustCompile("^[a-z]([a-z0-9]*)?$"), - "must start with a letter, must have lower case letters or numbers", - ), - }, - }, - "owner": schema.StringAttribute{ - Description: descriptions["owner"], - Required: true, - PlanModifiers: []planmodifier.String{}, - }, - "region": schema.StringAttribute{ - Optional: true, - // must be computed to allow for storing the override value from the provider - Computed: true, - Description: descriptions["region"], - PlanModifiers: []planmodifier.String{ - stringplanmodifier.RequiresReplace(), - }, + "database_id": identityschema.Int64Attribute{ + RequiredForImport: true, }, }, } @@ -207,18 +171,26 @@ func (r *databaseResource) Create( req resource.CreateRequest, resp *resource.CreateResponse, ) { // nolint:gocritic // function signature required by Terraform - var model Model + var model resourceModel diags := req.Plan.Get(ctx, &model) resp.Diagnostics.Append(diags...) if resp.Diagnostics.HasError() { return } + // Read identity data + var identityData DatabaseResourceIdentityModel + resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...) + if resp.Diagnostics.HasError() { + return + } + ctx = core.InitProviderContext(ctx) - projectId := model.ProjectId.ValueString() - region := model.Region.ValueString() - instanceId := model.InstanceId.ValueString() + projectId := identityData.ProjectID.ValueString() + region := identityData.ProjectID.ValueString() + instanceId := identityData.InstanceID.ValueString() + ctx = tflog.SetField(ctx, "project_id", projectId) ctx = tflog.SetField(ctx, "instance_id", instanceId) ctx = tflog.SetField(ctx, "region", region) @@ -272,7 +244,7 @@ func (r *databaseResource) Create( } // Map response body to schema - err = mapFields(database, &model, region) + err = mapResourceFields(database, &model) if err != nil { core.LogAndAddError( ctx, @@ -282,9 +254,21 @@ func (r *databaseResource) Create( ) return } + + // Set data returned by API in identity + identity := DatabaseResourceIdentityModel{ + ProjectID: types.StringValue(projectId), + Region: types.StringValue(region), + InstanceID: types.StringValue(instanceId), + DatabaseID: types.Int64Value(databaseId), + } + resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...) + if resp.Diagnostics.HasError() { + return + } + // Set state to fully populated data - diags = resp.State.Set(ctx, model) - resp.Diagnostics.Append(diags...) + resp.Diagnostics.Append(resp.State.Set(ctx, model)...) if resp.Diagnostics.HasError() { return } @@ -297,23 +281,36 @@ func (r *databaseResource) Read( req resource.ReadRequest, resp *resource.ReadResponse, ) { // nolint:gocritic // function signature required by Terraform - var model Model + var model resourceModel diags := req.State.Get(ctx, &model) resp.Diagnostics.Append(diags...) if resp.Diagnostics.HasError() { return } + // Read identity data + var identityData DatabaseResourceIdentityModel + resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...) + if resp.Diagnostics.HasError() { + return + } + ctx = core.InitProviderContext(ctx) - projectId := model.ProjectId.ValueString() - instanceId := model.InstanceId.ValueString() - databaseId := model.DatabaseId.ValueInt64() - region := r.providerData.GetRegionWithOverride(model.Region) + projectId, instanceId, region, databaseId, errExt := r.extractIdentityData(model, identityData) + if errExt != nil { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + extractErrorSummary, + fmt.Sprintf(extractErrorMessage, errExt), + ) + } + ctx = tflog.SetField(ctx, "project_id", projectId) ctx = tflog.SetField(ctx, "instance_id", instanceId) - ctx = tflog.SetField(ctx, "database_id", databaseId) ctx = tflog.SetField(ctx, "region", region) + ctx = tflog.SetField(ctx, "database_id", databaseId) databaseResp, err := getDatabaseById(ctx, r.client, projectId, region, instanceId, databaseId) if err != nil { @@ -329,7 +326,7 @@ func (r *databaseResource) Read( ctx = core.LogResponse(ctx) // Map response body to schema - err = mapFields(databaseResp, &model, region) + err = mapResourceFields(databaseResp, &model) if err != nil { core.LogAndAddError( ctx, @@ -355,32 +352,45 @@ func (r *databaseResource) Update( req resource.UpdateRequest, resp *resource.UpdateResponse, ) { - var model Model + var model resourceModel diags := req.Plan.Get(ctx, &model) resp.Diagnostics.Append(diags...) if resp.Diagnostics.HasError() { return } + // Read identity data + var identityData DatabaseResourceIdentityModel + resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...) + if resp.Diagnostics.HasError() { + return + } + ctx = core.InitProviderContext(ctx) - projectId := model.ProjectId.ValueString() - instanceId := model.InstanceId.ValueString() - databaseId64 := model.DatabaseId.ValueInt64() + projectId, instanceId, region, databaseId64, errExt := r.extractIdentityData(model, identityData) + if errExt != nil { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + extractErrorSummary, + fmt.Sprintf(extractErrorMessage, errExt), + ) + } + if databaseId64 > math.MaxInt32 { core.LogAndAddError(ctx, &resp.Diagnostics, "Error in type conversion", "int value too large (databaseId)") return } databaseId := int32(databaseId64) - region := model.Region.ValueString() ctx = tflog.SetField(ctx, "project_id", projectId) ctx = tflog.SetField(ctx, "instance_id", instanceId) - ctx = tflog.SetField(ctx, "database_id", databaseId) ctx = tflog.SetField(ctx, "region", region) + ctx = tflog.SetField(ctx, "database_id", databaseId) // Retrieve values from state - var stateModel Model + var stateModel resourceModel diags = req.State.Get(ctx, &stateModel) resp.Diagnostics.Append(diags...) if resp.Diagnostics.HasError() { @@ -420,7 +430,7 @@ func (r *databaseResource) Update( ctx = core.LogResponse(ctx) // Map response body to schema - err = mapFieldsUpdatePartially(res, &model, region) + err = mapResourceFields(res.Database, &model) if err != nil { core.LogAndAddError( ctx, @@ -445,29 +455,41 @@ func (r *databaseResource) Delete( req resource.DeleteRequest, resp *resource.DeleteResponse, ) { // nolint:gocritic // function signature required by Terraform - var model Model + var model resourceModel diags := req.State.Get(ctx, &model) resp.Diagnostics.Append(diags...) if resp.Diagnostics.HasError() { return } + // Read identity data + var identityData DatabaseResourceIdentityModel + resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...) + if resp.Diagnostics.HasError() { + return + } + ctx = core.InitProviderContext(ctx) - projectId := model.ProjectId.ValueString() - instanceId := model.InstanceId.ValueString() - databaseId64 := model.DatabaseId.ValueInt64() + projectId, instanceId, region, databaseId64, errExt := r.extractIdentityData(model, identityData) + if errExt != nil { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + extractErrorSummary, + fmt.Sprintf(extractErrorMessage, errExt), + ) + } if databaseId64 > math.MaxInt32 { core.LogAndAddError(ctx, &resp.Diagnostics, "Error in type conversion", "int value too large (databaseId)") return } databaseId := int32(databaseId64) - region := model.Region.ValueString() ctx = tflog.SetField(ctx, "project_id", projectId) ctx = tflog.SetField(ctx, "instance_id", instanceId) - ctx = tflog.SetField(ctx, "database_id", databaseId) ctx = tflog.SetField(ctx, "region", region) + ctx = tflog.SetField(ctx, "database_id", databaseId) // Delete existing record set err := r.client.DeleteDatabaseRequestExecute(ctx, projectId, region, instanceId, databaseId) @@ -481,95 +503,118 @@ func (r *databaseResource) Delete( } // ImportState imports a resource into the Terraform state on success. -// The expected format of the resource import identifier is: project_id,zone_id,record_set_id +// The expected import identifier format is: [project_id],[region],[instance_id],[database_id] func (r *databaseResource) ImportState( ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse, ) { - idParts := strings.Split(req.ID, core.Separator) - if len(idParts) != 4 || idParts[0] == "" || idParts[1] == "" || idParts[2] == "" || idParts[3] == "" { - core.LogAndAddError( - ctx, &resp.Diagnostics, - "Error importing database", - fmt.Sprintf( - "Expected import identifier with format [project_id],[region],[instance_id],[database_id], got %q", - req.ID, - ), + + ctx = core.InitProviderContext(ctx) + + if req.ID != "" { + + idParts := strings.Split(req.ID, core.Separator) + + if len(idParts) != 4 || idParts[0] == "" || idParts[1] == "" || idParts[2] == "" || idParts[3] == "" { + core.LogAndAddError( + ctx, &resp.Diagnostics, + "Error importing database", + fmt.Sprintf( + "Expected import identifier with format [project_id],[region],[instance_id],[database_id], got %q", + req.ID, + ), + ) + return + } + + databaseId, err := strconv.ParseInt(idParts[3], 10, 64) + if err != nil { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + "Error importing database", + fmt.Sprintf("Invalid database_id format: %q. It must be a valid integer.", idParts[3]), + ) + return + } + + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), idParts[0])...) + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), idParts[1])...) + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), idParts[2])...) + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("database_id"), databaseId)...) + + core.LogAndAddWarning( + ctx, + &resp.Diagnostics, + "Postgresflex database imported with empty password", + "The database password is not imported as it is only available upon creation of a new database. The password field will be empty.", ) + + tflog.Info(ctx, "Postgres Flex database state imported") + return } - resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), idParts[0])...) - resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), idParts[1])...) - resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), idParts[2])...) - resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("database_id"), idParts[3])...) - core.LogAndAddWarning( - ctx, - &resp.Diagnostics, - "Postgresflex database imported with empty password", - "The database password is not imported as it is only available upon creation of a new database. The password field will be empty.", - ) + // If no ID is provided, attempt to read identity attributes from the import configuration + var identityData DatabaseResourceIdentityModel + resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...) + if resp.Diagnostics.HasError() { + return + } + + projectId := identityData.ProjectID.ValueString() + region := identityData.Region.ValueString() + instanceId := identityData.InstanceID.ValueString() + databaseId := identityData.DatabaseID.ValueInt64() + + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), projectId)...) + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), region)...) + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), instanceId)...) + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("database_id"), databaseId)...) + tflog.Info(ctx, "Postgres Flex database state imported") } -func mapFields(resp *postgresflexalpha.ListDatabase, model *Model, region string) error { - if resp == nil { - return fmt.Errorf("response is nil") - } - if resp.Id == nil || *resp.Id == 0 { - return fmt.Errorf("id not present") - } - if model == nil { - return fmt.Errorf("model input is nil") - } - - var databaseId int64 - if model.DatabaseId.ValueInt64() != 0 { +// extractIdentityData extracts essential identifiers from the resource model, falling back to the identity model. +func (r *databaseResource) extractIdentityData( + model resourceModel, + identity DatabaseResourceIdentityModel, +) (projectId, region, instanceId string, databaseId int64, err error) { + if !model.DatabaseId.IsNull() && !model.DatabaseId.IsUnknown() { databaseId = model.DatabaseId.ValueInt64() - } else if resp.Id != nil { - databaseId = *resp.Id } else { - return fmt.Errorf("database id not present") - } - model.Id = utils.BuildInternalTerraformId( - model.ProjectId.ValueString(), region, model.InstanceId.ValueString(), strconv.FormatInt(databaseId, 10), - ) - model.DatabaseId = types.Int64Value(databaseId) - model.Name = types.StringPointerValue(resp.Name) - model.Region = types.StringValue(region) - model.Owner = types.StringPointerValue(cleanString(resp.Owner)) - return nil -} - -func mapFieldsUpdatePartially( - res *postgresflexalpha.UpdateDatabasePartiallyResponse, - model *Model, - region string, -) error { - if res == nil { - return fmt.Errorf("response is nil") - } - return mapFields(res.Database, model, region) -} - -func cleanString(s *string) *string { - if s == nil { - return nil - } - res := strings.Trim(*s, "\"") - return &res -} - -func toCreatePayload(model *Model) (*postgresflexalpha.CreateDatabaseRequestPayload, error) { - if model == nil { - return nil, fmt.Errorf("nil model") + if identity.DatabaseID.IsNull() || identity.DatabaseID.IsUnknown() { + return "", "", "", 0, fmt.Errorf("database_id not found in config") + } + databaseId = identity.DatabaseID.ValueInt64() } - return &postgresflexalpha.CreateDatabaseRequestPayload{ - Name: model.Name.ValueStringPointer(), - Owner: model.Owner.ValueStringPointer(), - }, nil -} + if !model.ProjectId.IsNull() && !model.ProjectId.IsUnknown() { + projectId = model.ProjectId.ValueString() + } else { + if identity.ProjectID.IsNull() || identity.ProjectID.IsUnknown() { + return "", "", "", 0, fmt.Errorf("project_id not found in config") + } + projectId = identity.ProjectID.ValueString() + } -var errDatabaseNotFound = errors.New("database not found") + if !model.Region.IsNull() && !model.Region.IsUnknown() { + region = r.providerData.GetRegionWithOverride(model.Region) + } else { + if identity.Region.IsNull() || identity.Region.IsUnknown() { + return "", "", "", 0, fmt.Errorf("region not found in config") + } + region = r.providerData.GetRegionWithOverride(identity.Region) + } + + if !model.InstanceId.IsNull() && !model.InstanceId.IsUnknown() { + instanceId = model.InstanceId.ValueString() + } else { + if identity.InstanceID.IsNull() || identity.InstanceID.IsUnknown() { + return "", "", "", 0, fmt.Errorf("instance_id not found in config") + } + instanceId = identity.InstanceID.ValueString() + } + return projectId, region, instanceId, databaseId, nil +} diff --git a/stackit/internal/services/postgresflexalpha/database/resource_test.go b/stackit/internal/services/postgresflexalpha/database/resource_test.go deleted file mode 100644 index 15bced10..00000000 --- a/stackit/internal/services/postgresflexalpha/database/resource_test.go +++ /dev/null @@ -1,232 +0,0 @@ -package postgresflexalpha - -import ( - "reflect" - "testing" - - "github.com/google/go-cmp/cmp" - "github.com/hashicorp/terraform-plugin-framework/types" - "github.com/stackitcloud/stackit-sdk-go/core/utils" - postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha" -) - -func TestMapFields(t *testing.T) { - const testRegion = "region" - tests := []struct { - description string - input *postgresflex.ListDatabase - region string - expected Model - isValid bool - }{ - { - "default_values", - &postgresflex.ListDatabase{ - Id: utils.Ptr(int64(1)), - }, - testRegion, - Model{ - Id: types.StringValue("pid,region,iid,1"), - DatabaseId: types.Int64Value(int64(1)), - InstanceId: types.StringValue("iid"), - ProjectId: types.StringValue("pid"), - Name: types.StringNull(), - Owner: types.StringNull(), - Region: types.StringValue(testRegion), - }, - true, - }, - { - "simple_values", - &postgresflex.ListDatabase{ - Id: utils.Ptr(int64(1)), - Name: utils.Ptr("dbname"), - Owner: utils.Ptr("username"), - }, - testRegion, - Model{ - Id: types.StringValue("pid,region,iid,1"), - DatabaseId: types.Int64Value(int64(1)), - InstanceId: types.StringValue("iid"), - ProjectId: types.StringValue("pid"), - Name: types.StringValue("dbname"), - Owner: types.StringValue("username"), - Region: types.StringValue(testRegion), - }, - true, - }, - { - "null_fields_and_int_conversions", - &postgresflex.ListDatabase{ - Id: utils.Ptr(int64(1)), - Name: utils.Ptr(""), - Owner: utils.Ptr(""), - }, - testRegion, - Model{ - Id: types.StringValue("pid,region,iid,1"), - DatabaseId: types.Int64Value(int64(1)), - InstanceId: types.StringValue("iid"), - ProjectId: types.StringValue("pid"), - Name: types.StringValue(""), - Owner: types.StringValue(""), - Region: types.StringValue(testRegion), - }, - true, - }, - { - "nil_response", - nil, - testRegion, - Model{}, - false, - }, - { - "empty_response", - &postgresflex.ListDatabase{}, - testRegion, - Model{}, - false, - }, - { - "no_resource_id", - &postgresflex.ListDatabase{ - Id: utils.Ptr(int64(0)), - Name: utils.Ptr("dbname"), - Owner: utils.Ptr("username"), - }, - testRegion, - Model{}, - false, - }, - } - for _, tt := range tests { - t.Run( - tt.description, func(t *testing.T) { - state := &Model{ - ProjectId: tt.expected.ProjectId, - InstanceId: tt.expected.InstanceId, - } - err := mapFields(tt.input, state, tt.region) - if !tt.isValid && err == nil { - t.Fatalf("Should have failed") - } - if tt.isValid && err != nil { - t.Fatalf("Should not have failed: %v", err) - } - if tt.isValid { - diff := cmp.Diff(state, &tt.expected) - if diff != "" { - t.Fatalf("Data does not match: %s", diff) - } - } - }, - ) - } -} - -func TestToCreatePayload(t *testing.T) { - tests := []struct { - description string - input *Model - expected *postgresflex.CreateDatabaseRequestPayload - isValid bool - }{ - { - "default_values", - &Model{ - Name: types.StringValue("dbname"), - Owner: types.StringValue("username"), - }, - &postgresflex.CreateDatabaseRequestPayload{ - Name: utils.Ptr("dbname"), - Owner: utils.Ptr("username"), - }, - true, - }, - { - "null_fields", - &Model{ - Name: types.StringNull(), - Owner: types.StringNull(), - }, - &postgresflex.CreateDatabaseRequestPayload{ - Name: nil, - Owner: nil, - }, - true, - }, - { - "nil_model", - nil, - nil, - false, - }, - } - for _, tt := range tests { - t.Run( - tt.description, func(t *testing.T) { - output, err := toCreatePayload(tt.input) - if !tt.isValid && err == nil { - t.Fatalf("Should have failed") - } - if tt.isValid && err != nil { - t.Fatalf("Should not have failed: %v", err) - } - if tt.isValid { - diff := cmp.Diff(output, tt.expected) - if diff != "" { - t.Fatalf("Data does not match: %s", diff) - } - } - }, - ) - } -} - -func Test_cleanString(t *testing.T) { - type args struct { - s *string - } - tests := []struct { - name string - args args - want *string - }{ - { - name: "simple_value", - args: args{ - s: utils.Ptr("mytest"), - }, - want: utils.Ptr("mytest"), - }, - { - name: "simple_value_with_quotes", - args: args{ - s: utils.Ptr("\"mytest\""), - }, - want: utils.Ptr("mytest"), - }, - { - name: "simple_values_with_quotes", - args: args{ - s: utils.Ptr("\"my test here\""), - }, - want: utils.Ptr("my test here"), - }, - { - name: "simple_values", - args: args{ - s: utils.Ptr("my test here"), - }, - want: utils.Ptr("my test here"), - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - if got := cleanString(tt.args.s); !reflect.DeepEqual(got, tt.want) { - t.Errorf("cleanString() = %v, want %v", got, tt.want) - } - }) - } -} diff --git a/stackit/internal/services/postgresflexalpha/database/resources_gen/database_resource_gen.go b/stackit/internal/services/postgresflexalpha/database/resources_gen/database_resource_gen.go index 95f6b6e5..6affc956 100644 --- a/stackit/internal/services/postgresflexalpha/database/resources_gen/database_resource_gen.go +++ b/stackit/internal/services/postgresflexalpha/database/resources_gen/database_resource_gen.go @@ -4,6 +4,8 @@ package postgresflexalpha import ( "context" + "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator" + "github.com/hashicorp/terraform-plugin-framework/schema/validator" "github.com/hashicorp/terraform-plugin-framework/types" "github.com/hashicorp/terraform-plugin-framework/resource/schema" @@ -12,11 +14,23 @@ import ( func DatabaseResourceSchema(ctx context.Context) schema.Schema { return schema.Schema{ Attributes: map[string]schema.Attribute{ + "database_id": schema.Int64Attribute{ + Optional: true, + Computed: true, + Description: "The ID of the database.", + MarkdownDescription: "The ID of the database.", + }, "id": schema.Int64Attribute{ Computed: true, Description: "The id of the database.", MarkdownDescription: "The id of the database.", }, + "instance_id": schema.StringAttribute{ + Optional: true, + Computed: true, + Description: "The ID of the instance.", + MarkdownDescription: "The ID of the instance.", + }, "name": schema.StringAttribute{ Required: true, Description: "The name of the database.", @@ -28,12 +42,33 @@ func DatabaseResourceSchema(ctx context.Context) schema.Schema { Description: "The owner of the database.", MarkdownDescription: "The owner of the database.", }, + "project_id": schema.StringAttribute{ + Optional: true, + Computed: true, + Description: "The STACKIT project ID.", + MarkdownDescription: "The STACKIT project ID.", + }, + "region": schema.StringAttribute{ + Optional: true, + Computed: true, + Description: "The region which should be addressed", + MarkdownDescription: "The region which should be addressed", + Validators: []validator.String{ + stringvalidator.OneOf( + "eu01", + ), + }, + }, }, } } type DatabaseModel struct { - Id types.Int64 `tfsdk:"id"` - Name types.String `tfsdk:"name"` - Owner types.String `tfsdk:"owner"` + DatabaseId types.Int64 `tfsdk:"database_id"` + Id types.Int64 `tfsdk:"id"` + InstanceId types.String `tfsdk:"instance_id"` + Name types.String `tfsdk:"name"` + Owner types.String `tfsdk:"owner"` + ProjectId types.String `tfsdk:"project_id"` + Region types.String `tfsdk:"region"` } diff --git a/stackit/internal/services/postgresflexalpha/flavor/functions_test.go b/stackit/internal/services/postgresflexalpha/flavor/functions_test.go index db8fa3bf..90590716 100644 --- a/stackit/internal/services/postgresflexalpha/flavor/functions_test.go +++ b/stackit/internal/services/postgresflexalpha/flavor/functions_test.go @@ -12,8 +12,8 @@ type mockRequest struct { executeFunc func() (*postgresflex.GetFlavorsResponse, error) } -func (m *mockRequest) Page(_ int64) postgresflex.ApiGetFlavorsRequestRequest { return m } -func (m *mockRequest) Size(_ int64) postgresflex.ApiGetFlavorsRequestRequest { return m } +func (m *mockRequest) Page(_ int32) postgresflex.ApiGetFlavorsRequestRequest { return m } +func (m *mockRequest) Size(_ int32) postgresflex.ApiGetFlavorsRequestRequest { return m } func (m *mockRequest) Sort(_ postgresflex.FlavorSort) postgresflex.ApiGetFlavorsRequestRequest { return m } diff --git a/stackit/internal/services/postgresflexalpha/flavors/datasource.go b/stackit/internal/services/postgresflexalpha/flavors/datasource.go index 26be805b..44483018 100644 --- a/stackit/internal/services/postgresflexalpha/flavors/datasource.go +++ b/stackit/internal/services/postgresflexalpha/flavors/datasource.go @@ -21,12 +21,19 @@ func NewFlavorsDataSource() datasource.DataSource { return &flavorsDataSource{} } +// dataSourceModel maps the data source schema data. +type dataSourceModel = postgresflexalphaGen.FlavorsModel + type flavorsDataSource struct { client *postgresflexalpha.APIClient providerData core.ProviderData } -func (d *flavorsDataSource) Metadata(_ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) { +func (d *flavorsDataSource) Metadata( + _ context.Context, + req datasource.MetadataRequest, + resp *datasource.MetadataResponse, +) { resp.TypeName = req.ProviderTypeName + "_postgresflexalpha_flavors" } @@ -35,7 +42,11 @@ func (d *flavorsDataSource) Schema(ctx context.Context, _ datasource.SchemaReque } // Configure adds the provider configured client to the data source. -func (d *flavorsDataSource) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) { +func (d *flavorsDataSource) Configure( + ctx context.Context, + req datasource.ConfigureRequest, + resp *datasource.ConfigureResponse, +) { var ok bool d.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics) if !ok { @@ -51,7 +62,7 @@ func (d *flavorsDataSource) Configure(ctx context.Context, req datasource.Config } func (d *flavorsDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { - var data postgresflexalphaGen.FlavorsModel + var data dataSourceModel // Read Terraform configuration data into the model resp.Diagnostics.Append(req.Config.Get(ctx, &data)...) diff --git a/stackit/internal/services/postgresflexalpha/flavors/datasources_gen/flavors_data_source_gen.go b/stackit/internal/services/postgresflexalpha/flavors/datasources_gen/flavors_data_source_gen.go index 924d1375..dbfe5cc9 100644 --- a/stackit/internal/services/postgresflexalpha/flavors/datasources_gen/flavors_data_source_gen.go +++ b/stackit/internal/services/postgresflexalpha/flavors/datasources_gen/flavors_data_source_gen.go @@ -33,7 +33,7 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema { Description: "The flavor description.", MarkdownDescription: "The flavor description.", }, - "id": schema.StringAttribute{ + "tf_original_api_id": schema.StringAttribute{ Computed: true, Description: "The id of the instance flavor.", MarkdownDescription: "The id of the instance flavor.", diff --git a/stackit/internal/services/postgresflexalpha/instance/datasource.go b/stackit/internal/services/postgresflexalpha/instance/datasource.go index de0c5c74..95f7904b 100644 --- a/stackit/internal/services/postgresflexalpha/instance/datasource.go +++ b/stackit/internal/services/postgresflexalpha/instance/datasource.go @@ -5,6 +5,7 @@ import ( "fmt" "net/http" + "github.com/hashicorp/terraform-plugin-framework/types" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" postgresflexalpha2 "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/instance/datasources_gen" @@ -26,6 +27,12 @@ func NewInstanceDataSource() datasource.DataSource { return &instanceDataSource{} } +// dataSourceModel maps the data source schema data. +type dataSourceModel struct { + postgresflexalpha2.InstanceModel + TerraformID types.String `tfsdk:"id"` +} + // instanceDataSource is the data source implementation. type instanceDataSource struct { client *postgresflexalpha.APIClient @@ -33,12 +40,20 @@ type instanceDataSource struct { } // Metadata returns the data source type name. -func (r *instanceDataSource) Metadata(_ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) { +func (r *instanceDataSource) Metadata( + _ context.Context, + req datasource.MetadataRequest, + resp *datasource.MetadataResponse, +) { resp.TypeName = req.ProviderTypeName + "_postgresflexalpha_instance" } // Configure adds the provider configured client to the data source. -func (r *instanceDataSource) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) { +func (r *instanceDataSource) Configure( + ctx context.Context, + req datasource.ConfigureRequest, + resp *datasource.ConfigureResponse, +) { var ok bool r.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics) if !ok { @@ -59,8 +74,12 @@ func (r *instanceDataSource) Schema(ctx context.Context, _ datasource.SchemaRequ } // Read refreshes the Terraform state with the latest data. -func (r *instanceDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { // nolint:gocritic // function signature required by Terraform - var model postgresflexalpha2.InstanceModel +func (r *instanceDataSource) Read( + ctx context.Context, + req datasource.ReadRequest, + resp *datasource.ReadResponse, +) { // nolint:gocritic // function signature required by Terraform + var model dataSourceModel diags := req.Config.Get(ctx, &model) resp.Diagnostics.Append(diags...) if resp.Diagnostics.HasError() { diff --git a/stackit/internal/services/postgresflexalpha/instance/datasources_gen/instance_data_source_gen.go b/stackit/internal/services/postgresflexalpha/instance/datasources_gen/instance_data_source_gen.go index 5ff386fe..047d0176 100644 --- a/stackit/internal/services/postgresflexalpha/instance/datasources_gen/instance_data_source_gen.go +++ b/stackit/internal/services/postgresflexalpha/instance/datasources_gen/instance_data_source_gen.go @@ -88,7 +88,7 @@ func InstanceDataSourceSchema(ctx context.Context) schema.Schema { Description: "The id of the instance flavor.", MarkdownDescription: "The id of the instance flavor.", }, - "id": schema.StringAttribute{ + "tf_original_api_id": schema.StringAttribute{ Computed: true, Description: "The ID of the instance.", MarkdownDescription: "The ID of the instance.", @@ -204,7 +204,7 @@ type InstanceModel struct { ConnectionInfo ConnectionInfoValue `tfsdk:"connection_info"` Encryption EncryptionValue `tfsdk:"encryption"` FlavorId types.String `tfsdk:"flavor_id"` - Id types.String `tfsdk:"id"` + Id types.String `tfsdk:"tf_original_api_id"` InstanceId types.String `tfsdk:"instance_id"` IsDeletable types.Bool `tfsdk:"is_deletable"` Name types.String `tfsdk:"name"` diff --git a/stackit/internal/services/postgresflexalpha/instance/functions.go b/stackit/internal/services/postgresflexalpha/instance/functions.go index dc29abe4..862f88ff 100644 --- a/stackit/internal/services/postgresflexalpha/instance/functions.go +++ b/stackit/internal/services/postgresflexalpha/instance/functions.go @@ -14,26 +14,32 @@ import ( "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" ) -func mapGetInstanceResponseToModel(ctx context.Context, m *postgresflexalpharesource.InstanceModel, resp *postgresflex.GetInstanceResponse) error { - tflog.Debug(ctx, ">>>> MSH DEBUG <<<<", map[string]interface{}{ - "id": m.Id.ValueString(), - "instance_id": m.InstanceId.ValueString(), - "backup_schedule": m.BackupSchedule.ValueString(), - "flavor_id": m.FlavorId.ValueString(), - "encryption.kek_key_id": m.Encryption.KekKeyId.ValueString(), - "encryption.kek_key_ring_id": m.Encryption.KekKeyRingId.ValueString(), - "encryption.kek_key_version": m.Encryption.KekKeyVersion.ValueString(), - "encryption.service_account": m.Encryption.ServiceAccount.ValueString(), - "is_deletable": m.IsDeletable.ValueBool(), - "name": m.Name.ValueString(), - "status": m.Status.ValueString(), - "retention_days": m.RetentionDays.ValueInt64(), - "replicas": m.Replicas.ValueInt64(), - "network.instance_address": m.Network.InstanceAddress.ValueString(), - "network.router_address": m.Network.RouterAddress.ValueString(), - "version": m.Version.ValueString(), - "network.acl": m.Network.Acl.String(), - }) +func mapGetInstanceResponseToModel( + ctx context.Context, + m *postgresflexalpharesource.InstanceModel, + resp *postgresflex.GetInstanceResponse, +) error { + tflog.Debug( + ctx, ">>>> MSH DEBUG <<<<", map[string]interface{}{ + "id": m.Id.ValueString(), + "instance_id": m.InstanceId.ValueString(), + "backup_schedule": m.BackupSchedule.ValueString(), + "flavor_id": m.FlavorId.ValueString(), + "encryption.kek_key_id": m.Encryption.KekKeyId.ValueString(), + "encryption.kek_key_ring_id": m.Encryption.KekKeyRingId.ValueString(), + "encryption.kek_key_version": m.Encryption.KekKeyVersion.ValueString(), + "encryption.service_account": m.Encryption.ServiceAccount.ValueString(), + "is_deletable": m.IsDeletable.ValueBool(), + "name": m.Name.ValueString(), + "status": m.Status.ValueString(), + "retention_days": m.RetentionDays.ValueInt64(), + "replicas": m.Replicas.ValueInt64(), + "network.instance_address": m.Network.InstanceAddress.ValueString(), + "network.router_address": m.Network.RouterAddress.ValueString(), + "version": m.Version.ValueString(), + "network.acl": m.Network.Acl.String(), + }, + ) m.BackupSchedule = types.StringValue(resp.GetBackupSchedule()) m.Encryption = postgresflexalpharesource.NewEncryptionValueNull() @@ -61,7 +67,11 @@ func mapGetInstanceResponseToModel(ctx context.Context, m *postgresflexalphareso m.FlavorId = types.StringValue(resp.GetFlavorId()) if m.Id.IsNull() || m.Id.IsUnknown() { - m.Id = utils.BuildInternalTerraformId(m.ProjectId.ValueString(), m.Region.ValueString(), m.InstanceId.ValueString()) + m.Id = utils.BuildInternalTerraformId( + m.ProjectId.ValueString(), + m.Region.ValueString(), + m.InstanceId.ValueString(), + ) } m.InstanceId = types.StringPointerValue(resp.Id) @@ -121,7 +131,11 @@ func mapGetInstanceResponseToModel(ctx context.Context, m *postgresflexalphareso return nil } -func mapGetDataInstanceResponseToModel(ctx context.Context, m *postgresflexalphadatasource.InstanceModel, resp *postgresflex.GetInstanceResponse) error { +func mapGetDataInstanceResponseToModel( + ctx context.Context, + m *dataSourceModel, + resp *postgresflex.GetInstanceResponse, +) error { m.BackupSchedule = types.StringValue(resp.GetBackupSchedule()) handleEncryption(m, resp) m.ConnectionInfo.Host = types.StringValue(resp.ConnectionInfo.GetHost()) @@ -155,7 +169,7 @@ func mapGetDataInstanceResponseToModel(ctx context.Context, m *postgresflexalpha return nil } -func handleNetwork(ctx context.Context, m *postgresflexalphadatasource.InstanceModel, resp *postgresflex.GetInstanceResponse) error { +func handleNetwork(ctx context.Context, m *dataSourceModel, resp *postgresflex.GetInstanceResponse) error { netAcl, diags := types.ListValueFrom(ctx, types.StringType, resp.Network.GetAcl()) if diags.HasError() { return fmt.Errorf("failed converting network acl from response") @@ -187,7 +201,7 @@ func handleNetwork(ctx context.Context, m *postgresflexalphadatasource.InstanceM return nil } -func handleEncryption(m *postgresflexalphadatasource.InstanceModel, resp *postgresflex.GetInstanceResponse) { +func handleEncryption(m *dataSourceModel, resp *postgresflex.GetInstanceResponse) { keyId := "" if keyIdVal, ok := resp.Encryption.GetKekKeyIdOk(); ok { keyId = keyIdVal diff --git a/stackit/internal/services/postgresflexalpha/instance/resource.go b/stackit/internal/services/postgresflexalpha/instance/resource.go index f061f8bf..78bb0572 100644 --- a/stackit/internal/services/postgresflexalpha/instance/resource.go +++ b/stackit/internal/services/postgresflexalpha/instance/resource.go @@ -23,8 +23,6 @@ import ( wait "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/wait/postgresflexalpha" ) -const packageName = "postgresflexalpha" - // Ensure the implementation satisfies the expected interfaces. var ( _ resource.Resource = &instanceResource{} @@ -40,11 +38,8 @@ func NewInstanceResource() resource.Resource { return &instanceResource{} } -// instanceResource is the resource implementation. -type instanceResource struct { - client *postgresflex.APIClient - providerData core.ProviderData -} +// resourceModel describes the resource data model. +type resourceModel = postgresflexalpha.InstanceModel type InstanceResourceIdentityModel struct { ProjectID types.String `tfsdk:"project_id"` @@ -52,8 +47,18 @@ type InstanceResourceIdentityModel struct { InstanceID types.String `tfsdk:"instance_id"` } -func (r *instanceResource) ValidateConfig(ctx context.Context, req resource.ValidateConfigRequest, resp *resource.ValidateConfigResponse) { - var data postgresflexalpha.InstanceModel +// instanceResource is the resource implementation. +type instanceResource struct { + client *postgresflex.APIClient + providerData core.ProviderData +} + +func (r *instanceResource) ValidateConfig( + ctx context.Context, + req resource.ValidateConfigRequest, + resp *resource.ValidateConfigResponse, +) { + var data resourceModel resp.Diagnostics.Append(req.Config.Get(ctx, &data)...) if resp.Diagnostics.HasError() { @@ -72,8 +77,12 @@ func (r *instanceResource) ValidateConfig(ctx context.Context, req resource.Vali // ModifyPlan implements resource.ResourceWithModifyPlan. // Use the modifier to set the effective region in the current plan. -func (r *instanceResource) ModifyPlan(ctx context.Context, req resource.ModifyPlanRequest, resp *resource.ModifyPlanResponse) { // nolint:gocritic // function signature required by Terraform - var configModel postgresflexalpha.InstanceModel +func (r *instanceResource) ModifyPlan( + ctx context.Context, + req resource.ModifyPlanRequest, + resp *resource.ModifyPlanResponse, +) { // nolint:gocritic // function signature required by Terraform + var configModel resourceModel // skip initial empty configuration to avoid follow-up errors if req.Config.Raw.IsNull() { return @@ -83,7 +92,7 @@ func (r *instanceResource) ModifyPlan(ctx context.Context, req resource.ModifyPl return } - var planModel postgresflexalpha.InstanceModel + var planModel resourceModel resp.Diagnostics.Append(req.Plan.Get(ctx, &planModel)...) if resp.Diagnostics.HasError() { return @@ -135,13 +144,13 @@ var modifiersFileByte []byte // Schema defines the schema for the resource. func (r *instanceResource) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) { schema := postgresflexalpha.InstanceResourceSchema(ctx) - fields, err := postgresflexUtils.ReadModifiersConfig(modifiersFileByte) + fields, err := utils.ReadModifiersConfig(modifiersFileByte) if err != nil { resp.Diagnostics.AddError("error during read modifiers config file", err.Error()) return } - err = postgresflexUtils.AddPlanModifiersToResourceSchema(fields, &schema) + err = utils.AddPlanModifiersToResourceSchema(fields, &schema) if err != nil { resp.Diagnostics.AddError("error adding plan modifiers", err.Error()) return @@ -149,7 +158,11 @@ func (r *instanceResource) Schema(ctx context.Context, _ resource.SchemaRequest, resp.Schema = schema } -func (r *instanceResource) IdentitySchema(_ context.Context, _ resource.IdentitySchemaRequest, resp *resource.IdentitySchemaResponse) { +func (r *instanceResource) IdentitySchema( + _ context.Context, + _ resource.IdentitySchemaRequest, + resp *resource.IdentitySchemaResponse, +) { resp.IdentitySchema = identityschema.Schema{ Attributes: map[string]identityschema.Attribute{ "project_id": identityschema.StringAttribute{ @@ -171,7 +184,7 @@ func (r *instanceResource) Create( req resource.CreateRequest, resp *resource.CreateResponse, ) { // nolint:gocritic // function signature required by Terraform - var model postgresflexalpha.InstanceModel + var model resourceModel diags := req.Plan.Get(ctx, &model) resp.Diagnostics.Append(diags...) @@ -201,7 +214,11 @@ func (r *instanceResource) Create( payload := modelToCreateInstancePayload(netAcl, model, replVal) // Create new instance - createResp, err := r.client.CreateInstanceRequest(ctx, projectId, region).CreateInstanceRequestPayload(payload).Execute() + createResp, err := r.client.CreateInstanceRequest( + ctx, + projectId, + region, + ).CreateInstanceRequestPayload(payload).Execute() if err != nil { core.LogAndAddError(ctx, &resp.Diagnostics, "error creating instance", fmt.Sprintf("Calling API: %v", err)) return @@ -227,13 +244,23 @@ func (r *instanceResource) Create( waitResp, err := wait.CreateInstanceWaitHandler(ctx, r.client, projectId, region, instanceId).WaitWithContext(ctx) if err != nil { - core.LogAndAddError(ctx, &resp.Diagnostics, "Error creating instance", fmt.Sprintf("Wait handler error: %v", err)) + core.LogAndAddError( + ctx, + &resp.Diagnostics, + "Error creating instance", + fmt.Sprintf("Wait handler error: %v", err), + ) return } err = mapGetInstanceResponseToModel(ctx, &model, waitResp) if err != nil { - core.LogAndAddError(ctx, &resp.Diagnostics, "Error creating instance", fmt.Sprintf("Error creating model: %v", err)) + core.LogAndAddError( + ctx, + &resp.Diagnostics, + "Error creating instance", + fmt.Sprintf("Error creating model: %v", err), + ) return } @@ -246,7 +273,11 @@ func (r *instanceResource) Create( tflog.Info(ctx, "Postgres Flex instance created") } -func modelToCreateInstancePayload(netAcl []string, model postgresflexalpha.InstanceModel, replVal int32) postgresflex.CreateInstanceRequestPayload { +func modelToCreateInstancePayload( + netAcl []string, + model postgresflexalpha.InstanceModel, + replVal int32, +) postgresflex.CreateInstanceRequestPayload { var enc *postgresflex.InstanceEncryption if !model.Encryption.IsNull() && !model.Encryption.IsUnknown() { enc = &postgresflex.InstanceEncryption{ @@ -279,10 +310,14 @@ func modelToCreateInstancePayload(netAcl []string, model postgresflexalpha.Insta } // Read refreshes the Terraform state with the latest data. -func (r *instanceResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) { // nolint:gocritic // function signature required by Terraform +func (r *instanceResource) Read( + ctx context.Context, + req resource.ReadRequest, + resp *resource.ReadResponse, +) { // nolint:gocritic // function signature required by Terraform functionErrorSummary := "read instance failed" - var model postgresflexalpha.InstanceModel + var model resourceModel diags := req.State.Get(ctx, &model) resp.Diagnostics.Append(diags...) if resp.Diagnostics.HasError() { @@ -371,7 +406,12 @@ func (r *instanceResource) Read(ctx context.Context, req resource.ReadRequest, r err = mapGetInstanceResponseToModel(ctx, &model, instanceResp) if err != nil { - core.LogAndAddError(ctx, &resp.Diagnostics, functionErrorSummary, fmt.Sprintf("Processing API payload: %v", err)) + core.LogAndAddError( + ctx, + &resp.Diagnostics, + functionErrorSummary, + fmt.Sprintf("Processing API payload: %v", err), + ) return } @@ -396,8 +436,12 @@ func (r *instanceResource) Read(ctx context.Context, req resource.ReadRequest, r } // Update updates the resource and sets the updated Terraform state on success. -func (r *instanceResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) { // nolint:gocritic // function signature required by Terraform - var model postgresflexalpha.InstanceModel +func (r *instanceResource) Update( + ctx context.Context, + req resource.UpdateRequest, + resp *resource.UpdateResponse, +) { // nolint:gocritic // function signature required by Terraform + var model resourceModel diags := req.Plan.Get(ctx, &model) resp.Diagnostics.Append(diags...) @@ -475,15 +519,31 @@ func (r *instanceResource) Update(ctx context.Context, req resource.UpdateReques ctx = core.LogResponse(ctx) - waitResp, err := wait.PartialUpdateInstanceWaitHandler(ctx, r.client, projectId, region, instanceId).WaitWithContext(ctx) + waitResp, err := wait.PartialUpdateInstanceWaitHandler( + ctx, + r.client, + projectId, + region, + instanceId, + ).WaitWithContext(ctx) if err != nil { - core.LogAndAddError(ctx, &resp.Diagnostics, "Error updating instance", fmt.Sprintf("Instance update waiting: %v", err)) + core.LogAndAddError( + ctx, + &resp.Diagnostics, + "Error updating instance", + fmt.Sprintf("Instance update waiting: %v", err), + ) return } err = mapGetInstanceResponseToModel(ctx, &model, waitResp) if err != nil { - core.LogAndAddError(ctx, &resp.Diagnostics, "Error updating instance", fmt.Sprintf("Processing API payload: %v", err)) + core.LogAndAddError( + ctx, + &resp.Diagnostics, + "Error updating instance", + fmt.Sprintf("Processing API payload: %v", err), + ) return } @@ -496,8 +556,12 @@ func (r *instanceResource) Update(ctx context.Context, req resource.UpdateReques } // Delete deletes the resource and removes the Terraform state on success. -func (r *instanceResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) { // nolint:gocritic // function signature required by Terraform - var model postgresflexalpha.InstanceModel +func (r *instanceResource) Delete( + ctx context.Context, + req resource.DeleteRequest, + resp *resource.DeleteResponse, +) { // nolint:gocritic // function signature required by Terraform + var model resourceModel diags := req.State.Get(ctx, &model) resp.Diagnostics.Append(diags...) @@ -538,16 +602,24 @@ func (r *instanceResource) Delete(ctx context.Context, req resource.DeleteReques // ImportState imports a resource into the Terraform state on success. // The expected format of the resource import identifier is: project_id,region,instance_id -func (r *instanceResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) { +func (r *instanceResource) ImportState( + ctx context.Context, + req resource.ImportStateRequest, + resp *resource.ImportStateResponse, +) { ctx = core.InitProviderContext(ctx) if req.ID != "" { idParts := strings.Split(req.ID, core.Separator) if len(idParts) != 3 || idParts[0] == "" || idParts[1] == "" || idParts[2] == "" { - core.LogAndAddError(ctx, &resp.Diagnostics, + core.LogAndAddError( + ctx, &resp.Diagnostics, "Error importing instance", - fmt.Sprintf("Expected import identifier with format: [project_id],[region],[instance_id] Got: %q", req.ID), + fmt.Sprintf( + "Expected import identifier with format: [project_id],[region],[instance_id] Got: %q", + req.ID, + ), ) return } @@ -558,25 +630,20 @@ func (r *instanceResource) ImportState(ctx context.Context, req resource.ImportS return } + // If no ID is provided, attempt to read identity attributes from the import configuration var identityData InstanceResourceIdentityModel resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...) if resp.Diagnostics.HasError() { return } - resp.Diagnostics.Append( - resp.State.SetAttribute( - ctx, - path.Root("id"), - utils.BuildInternalTerraformId( - identityData.ProjectID.ValueString(), - identityData.Region.ValueString(), - identityData.InstanceID.ValueString(), - ), - )...) - resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), identityData.ProjectID.ValueString())...) - resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), identityData.Region.ValueString())...) - resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), identityData.InstanceID.ValueString())...) + projectId := identityData.ProjectID.ValueString() + region := identityData.Region.ValueString() + instanceId := identityData.InstanceID.ValueString() + + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), projectId)...) + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), region)...) + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), instanceId)...) tflog.Info(ctx, "Postgres Flex instance state imported") } diff --git a/stackit/internal/services/postgresflexalpha/postgresflex_acc_test.go b/stackit/internal/services/postgresflexalpha/postgresflex_acc_test.go index 2a8a12b5..fbc442e4 100644 --- a/stackit/internal/services/postgresflexalpha/postgresflex_acc_test.go +++ b/stackit/internal/services/postgresflexalpha/postgresflex_acc_test.go @@ -164,6 +164,11 @@ func TestAccPostgresFlexFlexResource(t *testing.T) { Steps: []resource.TestStep{ // Creation { + // testdata/ + // ConfigDirectory: config.TestNameDirectory(), + + // testdata// + // ConfigDirectory: config.TestStepDirectory(), Config: configResources(instanceResource["backup_schedule"], &testutil.Region), Check: resource.ComposeAggregateTestCheckFunc( // Instance diff --git a/stackit/internal/services/postgresflexalpha/user/datasource.go b/stackit/internal/services/postgresflexalpha/user/datasource.go index 70d05aba..b0cf9d3b 100644 --- a/stackit/internal/services/postgresflexalpha/user/datasource.go +++ b/stackit/internal/services/postgresflexalpha/user/datasource.go @@ -5,22 +5,19 @@ import ( "fmt" "math" "net/http" - "strconv" + "github.com/hashicorp/terraform-plugin-framework/datasource/schema" + "github.com/hashicorp/terraform-plugin-framework/diag" postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" + postgresflexalpha "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/user/datasources_gen" postgresflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/utils" - "github.com/hashicorp/terraform-plugin-framework/attr" "github.com/hashicorp/terraform-plugin-framework/datasource" - "github.com/hashicorp/terraform-plugin-framework/schema/validator" + "github.com/hashicorp/terraform-plugin-framework/types" "github.com/hashicorp/terraform-plugin-log/tflog" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" - "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/validate" - - "github.com/hashicorp/terraform-plugin-framework/datasource/schema" - "github.com/hashicorp/terraform-plugin-framework/types" ) // Ensure the implementation satisfies the expected interfaces. @@ -28,25 +25,17 @@ var ( _ datasource.DataSource = &userDataSource{} ) -type DataSourceModel struct { - Id types.String `tfsdk:"id"` // needed by TF - UserId types.Int64 `tfsdk:"user_id"` - InstanceId types.String `tfsdk:"instance_id"` - ProjectId types.String `tfsdk:"project_id"` - Username types.String `tfsdk:"username"` - Roles types.Set `tfsdk:"roles"` - Host types.String `tfsdk:"host"` - Port types.Int64 `tfsdk:"port"` - Region types.String `tfsdk:"region"` - Status types.String `tfsdk:"status"` - ConnectionString types.String `tfsdk:"connection_string"` -} - // NewUserDataSource is a helper function to simplify the provider implementation. func NewUserDataSource() datasource.DataSource { return &userDataSource{} } +// dataSourceModel maps the data source schema data. +type dataSourceModel struct { + postgresflexalpha.UserModel + TerraformID types.String `tfsdk:"id"` +} + // userDataSource is the data source implementation. type userDataSource struct { client *postgresflex.APIClient @@ -83,84 +72,16 @@ func (r *userDataSource) Configure( } // Schema defines the schema for the data source. -func (r *userDataSource) Schema(_ context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) { - descriptions := map[string]string{ - "main": "Postgres Flex user data source schema. Must have a `region` specified in the provider configuration.", - "id": "Terraform's internal data source. ID. It is structured as \"`project_id`,`region`,`instance_id`,`user_id`\".", - "user_id": "User ID.", - "instance_id": "ID of the PostgresFlex instance.", - "project_id": "STACKIT project ID to which the instance is associated.", - "username": "The name of the user.", - "roles": "The roles assigned to the user.", - "host": "The host address for the user to connect to the instance.", - "port": "The port number for the user to connect to the instance.", - "region": "The resource region. If not defined, the provider region is used.", - "status": "The current status of the user.", - "connection_string": "The connection string for the user to the instance.", +func (r *userDataSource) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) { + s := postgresflexalpha.UserDataSourceSchema(ctx) + s.Attributes["id"] = schema.StringAttribute{ + Description: "Terraform's internal resource ID. It is structured as \\\"`project_id`,`region`,`instance_id`," + + "`user_id`\\\".\",", + Optional: true, + Computed: true, } - resp.Schema = schema.Schema{ - Description: descriptions["main"], - Attributes: map[string]schema.Attribute{ - "id": schema.StringAttribute{ - Description: descriptions["id"], - Computed: true, - }, - "user_id": schema.StringAttribute{ - Description: descriptions["user_id"], - Required: true, - Validators: []validator.String{ - validate.NoSeparator(), - }, - }, - "instance_id": schema.StringAttribute{ - Description: descriptions["instance_id"], - Required: true, - Validators: []validator.String{ - validate.UUID(), - validate.NoSeparator(), - }, - }, - "project_id": schema.StringAttribute{ - Description: descriptions["project_id"], - Required: true, - Validators: []validator.String{ - validate.UUID(), - validate.NoSeparator(), - }, - }, - "username": schema.StringAttribute{ - Description: descriptions["username"], - Computed: true, - }, - "roles": schema.SetAttribute{ - Description: descriptions["roles"], - ElementType: types.StringType, - Computed: true, - }, - "host": schema.StringAttribute{ - Description: descriptions["host"], - Computed: true, - }, - "port": schema.Int64Attribute{ - Description: descriptions["port"], - Computed: true, - }, - "region": schema.StringAttribute{ - // the region cannot be found automatically, so it has to be passed - Optional: true, - Description: descriptions["region"], - }, - "status": schema.StringAttribute{ - Description: descriptions["status"], - Computed: true, - }, - "connection_string": schema.StringAttribute{ - Description: descriptions["connection_string"], - Computed: true, - }, - }, - } + resp.Schema = s } // Read refreshes the Terraform state with the latest data. @@ -169,7 +90,7 @@ func (r *userDataSource) Read( req datasource.ReadRequest, resp *datasource.ReadResponse, ) { // nolint:gocritic // function signature required by Terraform - var model DataSourceModel + var model dataSourceModel diags := req.Config.Get(ctx, &model) resp.Diagnostics.Append(diags...) if resp.Diagnostics.HasError() { @@ -190,26 +111,12 @@ func (r *userDataSource) Read( region := r.providerData.GetRegionWithOverride(model.Region) ctx = tflog.SetField(ctx, "project_id", projectId) ctx = tflog.SetField(ctx, "instance_id", instanceId) - ctx = tflog.SetField(ctx, "user_id", userId) ctx = tflog.SetField(ctx, "region", region) + ctx = tflog.SetField(ctx, "user_id", userId) recordSetResp, err := r.client.GetUserRequest(ctx, projectId, region, instanceId, userId).Execute() if err != nil { - utils.LogError( - ctx, - &resp.Diagnostics, - err, - "Reading user", - fmt.Sprintf( - "User with ID %q or instance with ID %q does not exist in project %q.", - userId, - instanceId, - projectId, - ), - map[int]string{ - http.StatusForbidden: fmt.Sprintf("Project with ID %q not found or forbidden access", projectId), - }, - ) + handleReadError(ctx, &diags, err, projectId, instanceId, userId) resp.State.RemoveResource(ctx) return } @@ -237,47 +144,38 @@ func (r *userDataSource) Read( tflog.Info(ctx, "Postgres Flex user read") } -func mapDataSourceFields(userResp *postgresflex.GetUserResponse, model *DataSourceModel, region string) error { - if userResp == nil { - return fmt.Errorf("response is nil") - } - if model == nil { - return fmt.Errorf("model input is nil") - } - user := userResp - - var userId int64 - if model.UserId.ValueInt64() != 0 { - userId = model.UserId.ValueInt64() - } else if user.Id != nil { - userId = *user.Id - } else { - return fmt.Errorf("user id not present") - } - - model.Id = utils.BuildInternalTerraformId( - model.ProjectId.ValueString(), region, model.InstanceId.ValueString(), strconv.FormatInt(userId, 10), +// handleReadError centralizes API error handling for the Read operation. +func handleReadError( + ctx context.Context, + diags *diag.Diagnostics, + err error, + projectId, instanceId string, + userId int32, +) { + utils.LogError( + ctx, + diags, + err, + "Reading user", + fmt.Sprintf( + "User with ID %q or instance with ID %q does not exist in project %q.", + userId, + instanceId, + projectId, + ), + map[int]string{ + http.StatusBadRequest: fmt.Sprintf( + "Invalid user request parameters for project %q and instance %q.", + projectId, + instanceId, + ), + http.StatusNotFound: fmt.Sprintf( + "User, instance %q, or project %q or user %q not found.", + instanceId, + projectId, + userId, + ), + http.StatusForbidden: fmt.Sprintf("Forbidden access to project %q.", projectId), + }, ) - model.UserId = types.Int64Value(userId) - model.Username = types.StringPointerValue(user.Name) - - if user.Roles == nil { - model.Roles = types.SetNull(types.StringType) - } else { - var roles []attr.Value - for _, role := range *user.Roles { - roles = append(roles, types.StringValue(string(role))) - } - rolesSet, diags := types.SetValue(types.StringType, roles) - if diags.HasError() { - return fmt.Errorf("failed to map roles: %w", core.DiagsToError(diags)) - } - model.Roles = rolesSet - } - model.Host = types.StringPointerValue(user.Host) - model.Port = types.Int64PointerValue(user.Port) - model.Region = types.StringValue(region) - model.Status = types.StringPointerValue(user.Status) - model.ConnectionString = types.StringPointerValue(user.ConnectionString) - return nil } diff --git a/stackit/internal/services/postgresflexalpha/user/datasource_test.go b/stackit/internal/services/postgresflexalpha/user/datasource_test.go deleted file mode 100644 index 679bef85..00000000 --- a/stackit/internal/services/postgresflexalpha/user/datasource_test.go +++ /dev/null @@ -1,146 +0,0 @@ -package postgresflexalpha - -import ( - "testing" - - "github.com/google/go-cmp/cmp" - "github.com/hashicorp/terraform-plugin-framework/attr" - "github.com/hashicorp/terraform-plugin-framework/types" - "github.com/stackitcloud/stackit-sdk-go/core/utils" - "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha" -) - -func TestMapDataSourceFields(t *testing.T) { - const testRegion = "region" - tests := []struct { - description string - input *postgresflexalpha.GetUserResponse - region string - expected DataSourceModel - isValid bool - }{ - { - "default_values", - &postgresflexalpha.GetUserResponse{}, - testRegion, - DataSourceModel{ - Id: types.StringValue("pid,region,iid,1"), - UserId: types.Int64Value(1), - InstanceId: types.StringValue("iid"), - ProjectId: types.StringValue("pid"), - Username: types.StringNull(), - Roles: types.SetNull(types.StringType), - Host: types.StringNull(), - Port: types.Int64Null(), - Region: types.StringValue(testRegion), - }, - true, - }, - { - "simple_values", - &postgresflexalpha.GetUserResponse{ - Roles: &[]postgresflexalpha.UserRole{ - "role_1", - "role_2", - "", - }, - Name: utils.Ptr("username"), - Host: utils.Ptr("host"), - Port: utils.Ptr(int64(1234)), - }, - testRegion, - DataSourceModel{ - Id: types.StringValue("pid,region,iid,1"), - UserId: types.Int64Value(1), - InstanceId: types.StringValue("iid"), - ProjectId: types.StringValue("pid"), - Username: types.StringValue("username"), - Roles: types.SetValueMust( - types.StringType, []attr.Value{ - types.StringValue("role_1"), - types.StringValue("role_2"), - types.StringValue(""), - }, - ), - Host: types.StringValue("host"), - Port: types.Int64Value(1234), - Region: types.StringValue(testRegion), - Status: types.StringNull(), - ConnectionString: types.StringNull(), - }, - true, - }, - { - "null_fields_and_int_conversions", - &postgresflexalpha.GetUserResponse{ - Id: utils.Ptr(int64(1)), - Roles: &[]postgresflexalpha.UserRole{}, - Name: nil, - Host: nil, - Port: utils.Ptr(int64(2123456789)), - Status: utils.Ptr("status"), - ConnectionString: utils.Ptr("connection_string"), - }, - testRegion, - DataSourceModel{ - Id: types.StringValue("pid,region,iid,1"), - UserId: types.Int64Value(1), - InstanceId: types.StringValue("iid"), - ProjectId: types.StringValue("pid"), - Username: types.StringNull(), - Roles: types.SetValueMust(types.StringType, []attr.Value{}), - Host: types.StringNull(), - Port: types.Int64Value(2123456789), - Region: types.StringValue(testRegion), - Status: types.StringValue("status"), - ConnectionString: types.StringValue("connection_string"), - }, - true, - }, - { - "nil_response", - nil, - testRegion, - DataSourceModel{}, - false, - }, - { - "nil_response_2", - &postgresflexalpha.GetUserResponse{}, - testRegion, - DataSourceModel{}, - false, - }, - { - "no_resource_id", - &postgresflexalpha.GetUserResponse{}, - testRegion, - DataSourceModel{}, - false, - }, - } - for _, tt := range tests { - t.Run( - tt.description, func(t *testing.T) { - state := &DataSourceModel{ - ProjectId: tt.expected.ProjectId, - InstanceId: tt.expected.InstanceId, - UserId: tt.expected.UserId, - } - err := mapDataSourceFields(tt.input, state, tt.region) - if !tt.isValid && err == nil { - t.Fatalf("Should have failed") - } - if tt.isValid && err != nil { - t.Fatalf("Should not have failed: %v", err) - } - if tt.isValid { - diff := cmp.Diff(state, &tt.expected) - if diff != "" { - t.Fatalf("Data does not match: %s", diff) - } - } - }, - ) - } -} diff --git a/stackit/internal/services/postgresflexalpha/user/datasources_gen/user_data_source_gen.go b/stackit/internal/services/postgresflexalpha/user/datasources_gen/user_data_source_gen.go index fb2a7644..29a7cca0 100644 --- a/stackit/internal/services/postgresflexalpha/user/datasources_gen/user_data_source_gen.go +++ b/stackit/internal/services/postgresflexalpha/user/datasources_gen/user_data_source_gen.go @@ -14,17 +14,7 @@ import ( func UserDataSourceSchema(ctx context.Context) schema.Schema { return schema.Schema{ Attributes: map[string]schema.Attribute{ - "connection_string": schema.StringAttribute{ - Computed: true, - Description: "The connection string for the user to the instance.", - MarkdownDescription: "The connection string for the user to the instance.", - }, - "host": schema.StringAttribute{ - Computed: true, - Description: "The host of the instance in which the user belongs to.", - MarkdownDescription: "The host of the instance in which the user belongs to.", - }, - "id": schema.Int64Attribute{ + "tf_original_api_id": schema.Int64Attribute{ Computed: true, Description: "The ID of the user.", MarkdownDescription: "The ID of the user.", @@ -39,11 +29,6 @@ func UserDataSourceSchema(ctx context.Context) schema.Schema { Description: "The name of the user.", MarkdownDescription: "The name of the user.", }, - "port": schema.Int64Attribute{ - Computed: true, - Description: "The port of the instance in which the user belongs to.", - MarkdownDescription: "The port of the instance in which the user belongs to.", - }, "project_id": schema.StringAttribute{ Required: true, Description: "The STACKIT project ID.", @@ -80,15 +65,12 @@ func UserDataSourceSchema(ctx context.Context) schema.Schema { } type UserModel struct { - ConnectionString types.String `tfsdk:"connection_string"` - Host types.String `tfsdk:"host"` - Id types.Int64 `tfsdk:"id"` - InstanceId types.String `tfsdk:"instance_id"` - Name types.String `tfsdk:"name"` - Port types.Int64 `tfsdk:"port"` - ProjectId types.String `tfsdk:"project_id"` - Region types.String `tfsdk:"region"` - Roles types.List `tfsdk:"roles"` - Status types.String `tfsdk:"status"` - UserId types.Int64 `tfsdk:"user_id"` + Id types.Int64 `tfsdk:"tf_original_api_id"` + InstanceId types.String `tfsdk:"instance_id"` + Name types.String `tfsdk:"name"` + ProjectId types.String `tfsdk:"project_id"` + Region types.String `tfsdk:"region"` + Roles types.List `tfsdk:"roles"` + Status types.String `tfsdk:"status"` + UserId types.Int64 `tfsdk:"user_id"` } diff --git a/stackit/internal/services/postgresflexalpha/user/mapper.go b/stackit/internal/services/postgresflexalpha/user/mapper.go new file mode 100644 index 00000000..2445cb16 --- /dev/null +++ b/stackit/internal/services/postgresflexalpha/user/mapper.go @@ -0,0 +1,142 @@ +package postgresflexalpha + +import ( + "fmt" + "strconv" + + "github.com/hashicorp/terraform-plugin-framework/attr" + "github.com/hashicorp/terraform-plugin-framework/types" + postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" +) + +// mapDataSourceFields maps API response to data source model, preserving existing ID. +func mapDataSourceFields(userResp *postgresflex.GetUserResponse, model *dataSourceModel, region string) error { + if userResp == nil { + return fmt.Errorf("response is nil") + } + if model == nil { + return fmt.Errorf("model input is nil") + } + user := userResp + + var userId int64 + if model.UserId.ValueInt64() != 0 { + userId = model.UserId.ValueInt64() + } else if user.Id != nil { + userId = *user.Id + } else { + return fmt.Errorf("user id not present") + } + + model.TerraformID = utils.BuildInternalTerraformId( + model.ProjectId.ValueString(), region, model.InstanceId.ValueString(), strconv.FormatInt(userId, 10), + ) + + model.UserId = types.Int64Value(userId) + model.Name = types.StringValue(user.GetName()) + + if user.Roles == nil { + model.Roles = types.List(types.SetNull(types.StringType)) + } else { + var roles []attr.Value + for _, role := range *user.Roles { + roles = append(roles, types.StringValue(string(role))) + } + rolesSet, diags := types.SetValue(types.StringType, roles) + if diags.HasError() { + return fmt.Errorf("failed to map roles: %w", core.DiagsToError(diags)) + } + model.Roles = types.List(rolesSet) + } + + model.Id = types.Int64Value(userId) + model.Region = types.StringValue(region) + model.Status = types.StringValue(user.GetStatus()) + return nil +} + +// toPayloadRoles converts a string slice to the API's role type. +func toPayloadRoles(roles *[]string) *[]postgresflex.UserRole { + var userRoles = make([]postgresflex.UserRole, 0, len(*roles)) + for _, role := range *roles { + userRoles = append(userRoles, postgresflex.UserRole(role)) + } + return &userRoles +} + +// toUpdatePayload creates an API update payload from the resource model. +func toUpdatePayload(model *resourceModel, roles *[]string) ( + *postgresflex.UpdateUserRequestPayload, + error, +) { + if model == nil { + return nil, fmt.Errorf("nil model") + } + if roles == nil { + return nil, fmt.Errorf("nil roles") + } + + return &postgresflex.UpdateUserRequestPayload{ + Name: conversion.StringValueToPointer(model.Name), + Roles: toPayloadRoles(roles), + }, nil +} + +// toCreatePayload creates an API create payload from the resource model. +func toCreatePayload(model *resourceModel, roles *[]string) (*postgresflex.CreateUserRequestPayload, error) { + if model == nil { + return nil, fmt.Errorf("nil model") + } + if roles == nil { + return nil, fmt.Errorf("nil roles") + } + + return &postgresflex.CreateUserRequestPayload{ + Roles: toPayloadRoles(roles), + Name: conversion.StringValueToPointer(model.Name), + }, nil +} + +// mapResourceFields maps API response to the resource model, preserving existing ID. +func mapResourceFields(userResp *postgresflex.GetUserResponse, model *resourceModel, region string) error { + if userResp == nil { + return fmt.Errorf("response is nil") + } + if model == nil { + return fmt.Errorf("model input is nil") + } + user := userResp + + var userId int64 + if model.UserId.ValueInt64() != 0 { + userId = model.UserId.ValueInt64() + } else if user.Id != nil { + userId = *user.Id + } else { + return fmt.Errorf("user id not present") + } + + model.Id = types.Int64Value(userId) + model.UserId = types.Int64Value(userId) + model.Name = types.StringPointerValue(user.Name) + + if user.Roles == nil { + model.Roles = types.List(types.SetNull(types.StringType)) + } else { + var roles []attr.Value + for _, role := range *user.Roles { + roles = append(roles, types.StringValue(string(role))) + } + rolesSet, diags := types.SetValue(types.StringType, roles) + if diags.HasError() { + return fmt.Errorf("failed to map roles: %w", core.DiagsToError(diags)) + } + model.Roles = types.List(rolesSet) + } + model.Region = types.StringValue(region) + model.Status = types.StringPointerValue(user.Status) + return nil +} diff --git a/stackit/internal/services/postgresflexalpha/user/mapper_test.go b/stackit/internal/services/postgresflexalpha/user/mapper_test.go new file mode 100644 index 00000000..6eeff9f0 --- /dev/null +++ b/stackit/internal/services/postgresflexalpha/user/mapper_test.go @@ -0,0 +1,569 @@ +package postgresflexalpha + +import ( + "testing" + + "github.com/google/go-cmp/cmp" + "github.com/hashicorp/terraform-plugin-framework/attr" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/stackitcloud/stackit-sdk-go/core/utils" + postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha" + data "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/user/datasources_gen" +) + +func TestMapDataSourceFields(t *testing.T) { + const testRegion = "region" + tests := []struct { + description string + input *postgresflex.GetUserResponse + region string + expected dataSourceModel + isValid bool + }{ + { + "default_values", + &postgresflex.GetUserResponse{}, + testRegion, + dataSourceModel{ + UserModel: data.UserModel{ + Id: types.Int64Value(1), + UserId: types.Int64Value(1), + InstanceId: types.StringValue("iid"), + ProjectId: types.StringValue("pid"), + Name: types.StringValue(""), + Roles: types.List(types.SetNull(types.StringType)), + Status: types.StringValue(""), + Region: types.StringValue(testRegion), + }, + TerraformID: types.StringValue("pid,region,iid,1"), + }, + true, + }, + { + "simple_values", + &postgresflex.GetUserResponse{ + Roles: &[]postgresflex.UserRole{ + "role_1", + "role_2", + "", + }, + Name: utils.Ptr("username"), + }, + testRegion, + dataSourceModel{ + + UserModel: data.UserModel{ + Id: types.Int64Value(1), + UserId: types.Int64Value(1), + InstanceId: types.StringValue("iid"), + ProjectId: types.StringValue("pid"), + Name: types.StringValue("username"), + Roles: types.List( + types.SetValueMust( + types.StringType, []attr.Value{ + types.StringValue("role_1"), + types.StringValue("role_2"), + types.StringValue(""), + }, + ), + ), + Region: types.StringValue(testRegion), + Status: types.StringValue(""), + }, + TerraformID: types.StringValue("pid,region,iid,1"), + }, + true, + }, + { + "null_fields_and_int_conversions", + &postgresflex.GetUserResponse{ + Id: utils.Ptr(int64(1)), + Roles: &[]postgresflex.UserRole{}, + Name: nil, + Status: utils.Ptr("status"), + }, + testRegion, + dataSourceModel{ + UserModel: data.UserModel{ + Id: types.Int64Value(1), + UserId: types.Int64Value(1), + InstanceId: types.StringValue("iid"), + ProjectId: types.StringValue("pid"), + Name: types.StringValue(""), + Roles: types.List(types.SetValueMust(types.StringType, []attr.Value{})), + Region: types.StringValue(testRegion), + Status: types.StringValue("status"), + }, + TerraformID: types.StringValue("pid,region,iid,1"), + }, + true, + }, + { + "nil_response", + nil, + testRegion, + dataSourceModel{}, + false, + }, + { + "nil_response_2", + &postgresflex.GetUserResponse{}, + testRegion, + dataSourceModel{}, + false, + }, + { + "no_resource_id", + &postgresflex.GetUserResponse{}, + testRegion, + dataSourceModel{}, + false, + }, + } + for _, tt := range tests { + t.Run( + tt.description, func(t *testing.T) { + state := &dataSourceModel{ + UserModel: data.UserModel{ + ProjectId: tt.expected.ProjectId, + InstanceId: tt.expected.InstanceId, + UserId: tt.expected.UserId, + }, + } + err := mapDataSourceFields(tt.input, state, tt.region) + if !tt.isValid && err == nil { + t.Fatalf("Should have failed") + } + if tt.isValid && err != nil { + t.Fatalf("Should not have failed: %v", err) + } + if tt.isValid { + diff := cmp.Diff(state, &tt.expected) + if diff != "" { + t.Fatalf("Data does not match: %s", diff) + } + } + }, + ) + } +} + +func TestMapFieldsCreate(t *testing.T) { + const testRegion = "region" + tests := []struct { + description string + input *postgresflex.GetUserResponse + region string + expected resourceModel + isValid bool + }{ + { + "default_values", + &postgresflex.GetUserResponse{ + Id: utils.Ptr(int64(1)), + }, + testRegion, + resourceModel{ + UserId: types.Int64Value(1), + InstanceId: types.StringValue("iid"), + ProjectId: types.StringValue("pid"), + Name: types.StringNull(), + Roles: types.List(types.SetNull(types.StringType)), + Password: types.StringNull(), + Region: types.StringValue(testRegion), + Status: types.StringNull(), + ConnectionString: types.StringNull(), + }, + true, + }, + { + "simple_values", + &postgresflex.GetUserResponse{ + Id: utils.Ptr(int64(1)), + Name: utils.Ptr("username"), + Status: utils.Ptr("status"), + }, + testRegion, + resourceModel{ + UserId: types.Int64Value(1), + InstanceId: types.StringValue("iid"), + ProjectId: types.StringValue("pid"), + Name: types.StringValue("username"), + Roles: types.List(types.SetNull(types.StringType)), + Password: types.StringNull(), + Region: types.StringValue(testRegion), + Status: types.StringValue("status"), + ConnectionString: types.StringValue("connection_string"), + }, + true, + }, + { + "null_fields_and_int_conversions", + &postgresflex.GetUserResponse{ + Id: utils.Ptr(int64(1)), + Name: nil, + Status: nil, + }, + testRegion, + resourceModel{ + UserId: types.Int64Value(1), + InstanceId: types.StringValue("iid"), + ProjectId: types.StringValue("pid"), + Name: types.StringNull(), + Roles: types.List(types.SetNull(types.StringType)), + Password: types.StringNull(), + Region: types.StringValue(testRegion), + Status: types.StringNull(), + ConnectionString: types.StringNull(), + }, + true, + }, + { + "nil_response", + nil, + testRegion, + resourceModel{}, + false, + }, + { + "nil_response_2", + &postgresflex.GetUserResponse{}, + testRegion, + resourceModel{}, + false, + }, + { + "no_resource_id", + &postgresflex.GetUserResponse{}, + testRegion, + resourceModel{}, + false, + }, + } + for _, tt := range tests { + t.Run( + tt.description, func(t *testing.T) { + state := &resourceModel{ + ProjectId: tt.expected.ProjectId, + InstanceId: tt.expected.InstanceId, + } + + err := mapResourceFields(tt.input, state, tt.region) + if !tt.isValid && err == nil { + t.Fatalf("Should have failed") + } + if tt.isValid && err != nil { + t.Fatalf("Should not have failed: %v", err) + } + if tt.isValid { + diff := cmp.Diff(state, &tt.expected) + if diff != "" { + t.Fatalf("Data does not match: %s", diff) + } + } + }, + ) + } +} + +func TestMapFields(t *testing.T) { + const testRegion = "region" + tests := []struct { + description string + input *postgresflex.GetUserResponse + region string + expected resourceModel + isValid bool + }{ + { + "default_values", + &postgresflex.GetUserResponse{ + Id: utils.Ptr(int64(1)), + }, + testRegion, + resourceModel{ + Id: types.Int64Value(1), + UserId: types.Int64Value(int64(1)), + InstanceId: types.StringValue("iid"), + ProjectId: types.StringValue("pid"), + Name: types.StringNull(), + Roles: types.List(types.SetNull(types.StringType)), + Region: types.StringValue(testRegion), + Status: types.StringNull(), + ConnectionString: types.StringNull(), + }, + true, + }, + { + "simple_values", + &postgresflex.GetUserResponse{ + Id: utils.Ptr(int64(1)), + Roles: &[]postgresflex.UserRole{ + "role_1", + "role_2", + "", + }, + Name: utils.Ptr("username"), + }, + testRegion, + resourceModel{ + Id: types.Int64Value(1), + UserId: types.Int64Value(1), + InstanceId: types.StringValue("iid"), + ProjectId: types.StringValue("pid"), + Name: types.StringValue("username"), + Roles: types.List( + types.SetValueMust( + types.StringType, []attr.Value{ + types.StringValue("role_1"), + types.StringValue("role_2"), + types.StringValue(""), + }, + ), + ), + Region: types.StringValue(testRegion), + Status: types.StringNull(), + ConnectionString: types.StringNull(), + }, + true, + }, + { + "null_fields_and_int_conversions", + &postgresflex.GetUserResponse{ + Id: utils.Ptr(int64(1)), + Name: nil, + }, + testRegion, + resourceModel{ + Id: types.Int64Value(1), + UserId: types.Int64Value(1), + InstanceId: types.StringValue("iid"), + ProjectId: types.StringValue("pid"), + Name: types.StringNull(), + Roles: types.List(types.SetNull(types.StringType)), + Region: types.StringValue(testRegion), + Status: types.StringNull(), + ConnectionString: types.StringNull(), + }, + true, + }, + { + "nil_response", + nil, + testRegion, + resourceModel{}, + false, + }, + { + "nil_response_2", + &postgresflex.GetUserResponse{}, + testRegion, + resourceModel{}, + false, + }, + { + "no_resource_id", + &postgresflex.GetUserResponse{}, + testRegion, + resourceModel{}, + false, + }, + } + for _, tt := range tests { + t.Run( + tt.description, func(t *testing.T) { + state := &resourceModel{ + ProjectId: tt.expected.ProjectId, + InstanceId: tt.expected.InstanceId, + } + err := mapResourceFields(tt.input, state, tt.region) + if !tt.isValid && err == nil { + t.Fatalf("Should have failed") + } + if tt.isValid && err != nil { + t.Fatalf("Should not have failed: %v", err) + } + if tt.isValid { + diff := cmp.Diff(state, &tt.expected) + if diff != "" { + t.Fatalf("Data does not match: %s", diff) + } + } + }, + ) + } +} + +func TestToCreatePayload(t *testing.T) { + tests := []struct { + description string + input *resourceModel + inputRoles *[]string + expected *postgresflex.CreateUserRequestPayload + isValid bool + }{ + { + "default_values", + &resourceModel{}, + &[]string{}, + &postgresflex.CreateUserRequestPayload{ + Name: nil, + Roles: &[]postgresflex.UserRole{}, + }, + true, + }, + { + "simple_values", + &resourceModel{ + Name: types.StringValue("username"), + }, + &[]string{ + "role_1", + "role_2", + }, + &postgresflex.CreateUserRequestPayload{ + Name: utils.Ptr("username"), + Roles: &[]postgresflex.UserRole{ + "role_1", + "role_2", + }, + }, + true, + }, + { + "null_fields_and_int_conversions", + &resourceModel{ + Name: types.StringNull(), + }, + &[]string{ + "", + }, + &postgresflex.CreateUserRequestPayload{ + Roles: &[]postgresflex.UserRole{ + "", + }, + Name: nil, + }, + true, + }, + { + "nil_model", + nil, + &[]string{}, + nil, + false, + }, + { + "nil_roles", + &resourceModel{}, + nil, + nil, + false, + }, + } + for _, tt := range tests { + t.Run( + tt.description, func(t *testing.T) { + output, err := toCreatePayload(tt.input, tt.inputRoles) + if !tt.isValid && err == nil { + t.Fatalf("Should have failed") + } + if tt.isValid && err != nil { + t.Fatalf("Should not have failed: %v", err) + } + if tt.isValid { + diff := cmp.Diff(output, tt.expected) + if diff != "" { + t.Fatalf("Data does not match: %s", diff) + } + } + }, + ) + } +} + +func TestToUpdatePayload(t *testing.T) { + tests := []struct { + description string + input *resourceModel + inputRoles *[]string + expected *postgresflex.UpdateUserRequestPayload + isValid bool + }{ + { + "default_values", + &resourceModel{}, + &[]string{}, + &postgresflex.UpdateUserRequestPayload{ + Roles: &[]postgresflex.UserRole{}, + }, + true, + }, + { + "default_values", + &resourceModel{ + Name: types.StringValue("username"), + }, + &[]string{ + "role_1", + "role_2", + }, + &postgresflex.UpdateUserRequestPayload{ + Name: utils.Ptr("username"), + Roles: &[]postgresflex.UserRole{ + "role_1", + "role_2", + }, + }, + true, + }, + { + "null_fields_and_int_conversions", + &resourceModel{ + Name: types.StringNull(), + }, + &[]string{ + "", + }, + &postgresflex.UpdateUserRequestPayload{ + Roles: &[]postgresflex.UserRole{ + "", + }, + }, + true, + }, + { + "nil_model", + nil, + &[]string{}, + nil, + false, + }, + { + "nil_roles", + &resourceModel{}, + nil, + nil, + false, + }, + } + for _, tt := range tests { + t.Run( + tt.description, func(t *testing.T) { + output, err := toUpdatePayload(tt.input, tt.inputRoles) + if !tt.isValid && err == nil { + t.Fatalf("Should have failed") + } + if tt.isValid && err != nil { + t.Fatalf("Should not have failed: %v", err) + } + if tt.isValid { + diff := cmp.Diff(output, tt.expected) + if diff != "" { + t.Fatalf("Data does not match: %s", diff) + } + } + }, + ) + } +} diff --git a/stackit/internal/services/postgresflexalpha/user/planModifiers.yaml b/stackit/internal/services/postgresflexalpha/user/planModifiers.yaml new file mode 100644 index 00000000..a7d4cde6 --- /dev/null +++ b/stackit/internal/services/postgresflexalpha/user/planModifiers.yaml @@ -0,0 +1,55 @@ +fields: + - name: 'id' + modifiers: + - 'UseStateForUnknown' + + - name: 'user_id' + modifiers: + - 'UseStateForUnknown' + + - name: 'instance_id' + validators: + - validate.NoSeparator + - validate.UUID + modifiers: + - 'UseStateForUnknown' + + - name: 'project_id' + validators: + - validate.NoSeparator + - validate.UUID + modifiers: + - 'UseStateForUnknown' + - 'RequiresReplace' + + - name: 'name' + modifiers: + - 'UseStateForUnknown' + + - name: 'roles' + modifiers: + - 'UseStateForUnknown' + + - name: 'password' + modifiers: + - 'UseStateForUnknown' + + - name: 'host' + modifiers: + - 'UseStateForUnknown' + + - name: 'port' + modifiers: + - 'UseStateForUnknown' + + - name: 'region' + modifiers: + - 'RequiresReplace' + + - name: 'status' + modifiers: + - 'UseStateForUnknown' + + - name: 'connection_string' + modifiers: + - 'UseStateForUnknown' diff --git a/stackit/internal/services/postgresflexalpha/user/resource.go b/stackit/internal/services/postgresflexalpha/user/resource.go index 4df9577d..b7698986 100644 --- a/stackit/internal/services/postgresflexalpha/user/resource.go +++ b/stackit/internal/services/postgresflexalpha/user/resource.go @@ -2,6 +2,7 @@ package postgresflexalpha import ( "context" + _ "embed" "errors" "fmt" "math" @@ -9,60 +10,53 @@ import ( "strconv" "strings" + "github.com/hashicorp/terraform-plugin-framework/attr" "github.com/hashicorp/terraform-plugin-framework/diag" - "github.com/hashicorp/terraform-plugin-framework/resource/schema/int64planmodifier" + "github.com/hashicorp/terraform-plugin-framework/resource/identityschema" + postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha" + postgresflexalpha "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/user/resources_gen" postgresflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/utils" - postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha" - - "github.com/hashicorp/terraform-plugin-framework-validators/setvalidator" - "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator" - "github.com/hashicorp/terraform-plugin-framework/schema/validator" + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/resource" + "github.com/hashicorp/terraform-plugin-framework/types" "github.com/hashicorp/terraform-plugin-log/tflog" + "github.com/stackitcloud/stackit-sdk-go/core/oapierror" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" - "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/validate" - - "github.com/hashicorp/terraform-plugin-framework/attr" - "github.com/hashicorp/terraform-plugin-framework/path" - "github.com/hashicorp/terraform-plugin-framework/resource" - "github.com/hashicorp/terraform-plugin-framework/resource/schema" - "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier" - "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier" - "github.com/hashicorp/terraform-plugin-framework/types" - "github.com/stackitcloud/stackit-sdk-go/core/oapierror" ) -// Ensure the implementation satisfies the expected interfaces. var ( + // Ensure the implementation satisfies the expected interfaces. _ resource.Resource = &userResource{} _ resource.ResourceWithConfigure = &userResource{} _ resource.ResourceWithImportState = &userResource{} _ resource.ResourceWithModifyPlan = &userResource{} -) + _ resource.ResourceWithIdentity = &userResource{} -type Model struct { - Id types.String `tfsdk:"id"` // needed by TF - UserId types.Int64 `tfsdk:"user_id"` - InstanceId types.String `tfsdk:"instance_id"` - ProjectId types.String `tfsdk:"project_id"` - Username types.String `tfsdk:"username"` - Roles types.Set `tfsdk:"roles"` - Password types.String `tfsdk:"password"` - Host types.String `tfsdk:"host"` - Port types.Int64 `tfsdk:"port"` - Region types.String `tfsdk:"region"` - Status types.String `tfsdk:"status"` - ConnectionString types.String `tfsdk:"connection_string"` -} + // Error message constants + extractErrorSummary = "extracting failed" + extractErrorMessage = "Extracting identity data: %v" +) // NewUserResource is a helper function to simplify the provider implementation. func NewUserResource() resource.Resource { return &userResource{} } -// userResource is the resource implementation. +// resourceModel represents the Terraform resource state for a PostgreSQL Flex user. +type resourceModel = postgresflexalpha.UserModel + +// UserResourceIdentityModel describes the resource's identity attributes. +type UserResourceIdentityModel struct { + ProjectID types.String `tfsdk:"project_id"` + Region types.String `tfsdk:"region"` + InstanceID types.String `tfsdk:"instance_id"` + UserID types.Int64 `tfsdk:"database_id"` +} + +// userResource implements the resource handling for a PostgreSQL Flex user. type userResource struct { client *postgresflex.APIClient providerData core.ProviderData @@ -75,7 +69,7 @@ func (r *userResource) ModifyPlan( req resource.ModifyPlanRequest, resp *resource.ModifyPlanResponse, ) { // nolint:gocritic // function signature required by Terraform - var configModel Model + var configModel resourceModel // skip initial empty configuration to avoid follow-up errors if req.Config.Raw.IsNull() { return @@ -85,7 +79,7 @@ func (r *userResource) ModifyPlan( return } - var planModel Model + var planModel resourceModel resp.Diagnostics.Append(req.Plan.Get(ctx, &planModel)...) if resp.Diagnostics.HasError() { return @@ -123,117 +117,25 @@ func (r *userResource) Configure(ctx context.Context, req resource.ConfigureRequ tflog.Info(ctx, "Postgres Flex user client configured") } +//go:embed planModifiers.yaml +var modifiersFileByte []byte + // Schema defines the schema for the resource. -func (r *userResource) Schema(_ context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) { - rolesOptions := []string{"login", "createdb", "createrole"} +func (r *userResource) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) { + s := postgresflexalpha.UserResourceSchema(ctx) - descriptions := map[string]string{ - "main": "Postgres Flex user resource schema. Must have a `region` specified in the provider configuration.", - "id": "Terraform's internal resource ID. It is structured as \"`project_id`,`region`,`instance_id`,`user_id`\".", - "user_id": "User ID.", - "instance_id": "ID of the PostgresFlex instance.", - "project_id": "STACKIT project ID to which the instance is associated.", - "username": "The name of the user.", - "roles": "Database access levels for the user. " + utils.FormatPossibleValues(rolesOptions...), - "region": "The resource region. If not defined, the provider region is used.", - "status": "The current status of the user.", - "password": "The password for the user. This is only set upon creation.", - "host": "The host of the Postgres Flex instance.", - "port": "The port of the Postgres Flex instance.", - "connection_string": "The connection string for the user to the instance.", + fields, err := utils.ReadModifiersConfig(modifiersFileByte) + if err != nil { + resp.Diagnostics.AddError("error during read modifiers config file", err.Error()) + return } - resp.Schema = schema.Schema{ - Description: descriptions["main"], - Attributes: map[string]schema.Attribute{ - "id": schema.StringAttribute{ - Description: descriptions["id"], - Computed: true, - PlanModifiers: []planmodifier.String{ - stringplanmodifier.UseStateForUnknown(), - }, - }, - "user_id": schema.Int64Attribute{ - Description: descriptions["user_id"], - Computed: true, - PlanModifiers: []planmodifier.Int64{ - int64planmodifier.UseStateForUnknown(), - }, - Validators: []validator.Int64{}, - }, - "instance_id": schema.StringAttribute{ - Description: descriptions["instance_id"], - Required: true, - PlanModifiers: []planmodifier.String{ - stringplanmodifier.RequiresReplace(), - stringplanmodifier.UseStateForUnknown(), - }, - Validators: []validator.String{ - validate.UUID(), - validate.NoSeparator(), - }, - }, - "project_id": schema.StringAttribute{ - Description: descriptions["project_id"], - Required: true, - PlanModifiers: []planmodifier.String{ - stringplanmodifier.RequiresReplace(), - stringplanmodifier.UseStateForUnknown(), - }, - Validators: []validator.String{ - validate.UUID(), - validate.NoSeparator(), - }, - }, - "username": schema.StringAttribute{ - Description: descriptions["username"], - Required: true, - PlanModifiers: []planmodifier.String{ - // stringplanmodifier.RequiresReplace(), - }, - }, - "roles": schema.SetAttribute{ - Description: descriptions["roles"], - ElementType: types.StringType, - Required: true, - Validators: []validator.Set{ - setvalidator.ValueStringsAre( - stringvalidator.OneOf(rolesOptions...), - ), - }, - }, - "password": schema.StringAttribute{ - Description: descriptions["password"], - Computed: true, - Sensitive: true, - }, - "host": schema.StringAttribute{ - Description: descriptions["host"], - Computed: true, - }, - "port": schema.Int64Attribute{ - Description: descriptions["port"], - Computed: true, - }, - "region": schema.StringAttribute{ - Optional: true, - // must be computed to allow for storing the override value from the provider - Computed: true, - Description: descriptions["region"], - PlanModifiers: []planmodifier.String{ - stringplanmodifier.RequiresReplace(), - }, - }, - "status": schema.StringAttribute{ - Description: descriptions["status"], - Computed: true, - }, - "connection_string": schema.StringAttribute{ - Description: descriptions["connection_string"], - Computed: true, - }, - }, + err = utils.AddPlanModifiersToResourceSchema(fields, &s) + if err != nil { + resp.Diagnostics.AddError("error adding plan modifiers", err.Error()) + return } + resp.Schema = s } // Create creates the resource and sets the initial Terraform state. @@ -242,16 +144,33 @@ func (r *userResource) Create( req resource.CreateRequest, resp *resource.CreateResponse, ) { // nolint:gocritic // function signature required by Terraform - var model Model + var model resourceModel diags := req.Plan.Get(ctx, &model) resp.Diagnostics.Append(diags...) if resp.Diagnostics.HasError() { return } + // Read identity data + var identityData UserResourceIdentityModel + resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...) + if resp.Diagnostics.HasError() { + return + } + ctx = core.InitProviderContext(ctx) - ctx = r.setTFLogFields(ctx, &model) - arg := r.getClientArg(&model) + + arg, errExt := r.extractIdentityData(model, identityData) + if errExt != nil { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + extractErrorSummary, + fmt.Sprintf(extractErrorMessage, errExt), + ) + } + + ctx = r.setTFLogFields(ctx, arg) var roles = r.expandRoles(ctx, model.Roles, &resp.Diagnostics) if resp.Diagnostics.HasError() { @@ -277,8 +196,6 @@ func (r *userResource) Create( return } - ctx = core.LogResponse(ctx) - if userResp.Id == nil || *userResp.Id == 0 { core.LogAndAddError( ctx, @@ -288,12 +205,28 @@ func (r *userResource) Create( ) return } + model.Id = types.Int64PointerValue(userResp.Id) model.UserId = types.Int64PointerValue(userResp.Id) model.Password = types.StringPointerValue(userResp.Password) ctx = tflog.SetField(ctx, "user_id", *userResp.Id) - exists, err := r.getUserResource(ctx, &model) + ctx = core.LogResponse(ctx) + + // Set data returned by API in identity + identity := UserResourceIdentityModel{ + ProjectID: types.StringValue(arg.projectId), + Region: types.StringValue(arg.region), + InstanceID: types.StringValue(arg.instanceId), + UserID: types.Int64PointerValue(userResp.Id), + } + resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...) + if resp.Diagnostics.HasError() { + return + } + + // Verify creation + exists, err := r.getUserResource(ctx, &model, arg) if err != nil { core.LogAndAddError(ctx, &resp.Diagnostics, "Error creating user", fmt.Sprintf("Calling API: %v", err)) @@ -322,16 +255,38 @@ func (r *userResource) Read( req resource.ReadRequest, resp *resource.ReadResponse, ) { // nolint:gocritic // function signature required by Terraform - var model Model + var model resourceModel diags := req.State.Get(ctx, &model) resp.Diagnostics.Append(diags...) if resp.Diagnostics.HasError() { return } + // Read identity data + var identityData UserResourceIdentityModel + resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...) + if resp.Diagnostics.HasError() { + return + } + ctx = core.InitProviderContext(ctx) - exists, err := r.getUserResource(ctx, &model) + arg, errExt := r.extractIdentityData(model, identityData) + if errExt != nil { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + extractErrorSummary, + fmt.Sprintf(extractErrorMessage, errExt), + ) + } + + ctx = r.setTFLogFields(ctx, arg) + + ctx = core.InitProviderContext(ctx) + + // Read resource state + exists, err := r.getUserResource(ctx, &model, arg) if err != nil { core.LogAndAddError(ctx, &resp.Diagnostics, "Error reading user", fmt.Sprintf("Calling API: %v", err)) @@ -360,19 +315,37 @@ func (r *userResource) Update( req resource.UpdateRequest, resp *resource.UpdateResponse, ) { // nolint:gocritic // function signature required by Terraform - var model Model + var model resourceModel diags := req.Plan.Get(ctx, &model) resp.Diagnostics.Append(diags...) if resp.Diagnostics.HasError() { return } + // Read identity data + var identityData UserResourceIdentityModel + resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...) + if resp.Diagnostics.HasError() { + return + } + + ctx = core.InitProviderContext(ctx) + + arg, errExt := r.extractIdentityData(model, identityData) + if errExt != nil { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + extractErrorSummary, + fmt.Sprintf(extractErrorMessage, errExt), + ) + } + + ctx = r.setTFLogFields(ctx, arg) ctx = core.InitProviderContext(ctx) - ctx = r.setTFLogFields(ctx, &model) - arg := r.getClientArg(&model) // Retrieve values from state - var stateModel Model + var stateModel resourceModel diags = req.State.Get(ctx, &stateModel) resp.Diagnostics.Append(diags...) if resp.Diagnostics.HasError() { @@ -413,7 +386,8 @@ func (r *userResource) Update( ctx = core.LogResponse(ctx) - exists, err := r.getUserResource(ctx, &stateModel) + // Verify update + exists, err := r.getUserResource(ctx, &stateModel, arg) if err != nil { core.LogAndAddError(ctx, &resp.Diagnostics, "Error updating user", fmt.Sprintf("Calling API: %v", err)) @@ -443,16 +417,33 @@ func (r *userResource) Delete( req resource.DeleteRequest, resp *resource.DeleteResponse, ) { // nolint:gocritic // function signature required by Terraform - var model Model + var model resourceModel diags := req.State.Get(ctx, &model) resp.Diagnostics.Append(diags...) if resp.Diagnostics.HasError() { return } + // Read identity data + var identityData UserResourceIdentityModel + resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...) + if resp.Diagnostics.HasError() { + return + } ctx = core.InitProviderContext(ctx) - ctx = r.setTFLogFields(ctx, &model) - arg := r.getClientArg(&model) + + arg, errExt := r.extractIdentityData(model, identityData) + if errExt != nil { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + extractErrorSummary, + fmt.Sprintf(extractErrorMessage, errExt), + ) + } + + ctx = r.setTFLogFields(ctx, arg) + ctx = core.InitProviderContext(ctx) userId64 := arg.userId if userId64 > math.MaxInt32 { @@ -469,7 +460,8 @@ func (r *userResource) Delete( ctx = core.LogResponse(ctx) - exists, err := r.getUserResource(ctx, &model) + // Verify deletion + exists, err := r.getUserResource(ctx, &model, arg) if err != nil { core.LogAndAddError(ctx, &resp.Diagnostics, "Error deleting user", fmt.Sprintf("Calling API: %v", err)) return @@ -487,40 +479,31 @@ func (r *userResource) Delete( tflog.Info(ctx, "Postgres Flex user deleted") } -// ImportState imports a resource into the Terraform state on success. -// The expected format of the resource import identifier is: project_id,zone_id,record_set_id -func (r *userResource) ImportState( - ctx context.Context, - req resource.ImportStateRequest, - resp *resource.ImportStateResponse, +// IdentitySchema defines the fields that are required to uniquely identify a resource. +func (r *userResource) IdentitySchema( + _ context.Context, + _ resource.IdentitySchemaRequest, + response *resource.IdentitySchemaResponse, ) { - idParts := strings.Split(req.ID, core.Separator) - if len(idParts) != 4 || idParts[0] == "" || idParts[1] == "" || idParts[2] == "" || idParts[3] == "" { - core.LogAndAddError( - ctx, &resp.Diagnostics, - "Error importing user", - fmt.Sprintf( - "Expected import identifier with format [project_id],[region],[instance_id],[user_id], got %q", - req.ID, - ), - ) - return + response.IdentitySchema = identityschema.Schema{ + Attributes: map[string]identityschema.Attribute{ + "project_id": identityschema.StringAttribute{ + RequiredForImport: true, + }, + "region": identityschema.StringAttribute{ + RequiredForImport: true, + }, + "instance_id": identityschema.StringAttribute{ + RequiredForImport: true, + }, + "user_id": identityschema.Int64Attribute{ + RequiredForImport: true, + }, + }, } - - resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), idParts[0])...) - resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), idParts[1])...) - resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), idParts[2])...) - resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("user_id"), idParts[3])...) - core.LogAndAddWarning( - ctx, - &resp.Diagnostics, - "postgresflexalpha user imported with empty password and empty uri", - "The user password and uri are not imported as they are only available upon creation of a new user. The password and uri fields will be empty.", - ) - tflog.Info(ctx, "postgresflexalpha user state imported") } -func mapFields(userResp *postgresflex.GetUserResponse, model *Model, region string) error { +func mapFields(userResp *postgresflex.GetUserResponse, model *resourceModel, region string) error { if userResp == nil { return fmt.Errorf("response is nil") } @@ -537,14 +520,12 @@ func mapFields(userResp *postgresflex.GetUserResponse, model *Model, region stri } else { return fmt.Errorf("user id not present") } - model.Id = utils.BuildInternalTerraformId( - model.ProjectId.ValueString(), region, model.InstanceId.ValueString(), strconv.FormatInt(userId, 10), - ) + model.UserId = types.Int64Value(userId) - model.Username = types.StringPointerValue(user.Name) + model.Name = types.StringPointerValue(user.Name) if user.Roles == nil { - model.Roles = types.SetNull(types.StringType) + model.Roles = types.List(types.SetNull(types.StringType)) } else { var roles []attr.Value for _, role := range *user.Roles { @@ -554,27 +535,21 @@ func mapFields(userResp *postgresflex.GetUserResponse, model *Model, region stri if diags.HasError() { return fmt.Errorf("failed to map roles: %w", core.DiagsToError(diags)) } - model.Roles = rolesSet + model.Roles = types.List(rolesSet) } - model.Host = types.StringPointerValue(user.Host) - model.Port = types.Int64PointerValue(user.Port) model.Region = types.StringValue(region) model.Status = types.StringPointerValue(user.Status) - model.ConnectionString = types.StringPointerValue(user.ConnectionString) return nil } // getUserResource refreshes the resource state by calling the API and mapping the response to the model. // Returns true if the resource state was successfully refreshed, false if the resource does not exist. -func (r *userResource) getUserResource(ctx context.Context, model *Model) (bool, error) { - ctx = r.setTFLogFields(ctx, model) - arg := r.getClientArg(model) +func (r *userResource) getUserResource(ctx context.Context, model *resourceModel, arg *clientArg) (bool, error) { - userId64 := arg.userId - if userId64 > math.MaxInt32 { + if arg.userId > math.MaxInt32 { return false, errors.New("error in type conversion: int value too large (userId)") } - userId := int32(userId64) + userId := int32(arg.userId) // API Call userResp, err := r.client.GetUserRequest(ctx, arg.projectId, arg.region, arg.instanceId, userId).Execute() @@ -588,13 +563,14 @@ func (r *userResource) getUserResource(ctx context.Context, model *Model) (bool, return false, fmt.Errorf("error fetching user resource: %w", err) } - if err := mapFields(userResp, model, arg.region); err != nil { + if err := mapResourceFields(userResp, model, arg.region); err != nil { return false, fmt.Errorf("error mapping user resource: %w", err) } return true, nil } +// clientArg holds the arguments for API calls. type clientArg struct { projectId string instanceId string @@ -602,29 +578,137 @@ type clientArg struct { userId int64 } -// getClientArg constructs client arguments from the model. -func (r *userResource) getClientArg(model *Model) *clientArg { - return &clientArg{ - projectId: model.ProjectId.ValueString(), - instanceId: model.InstanceId.ValueString(), - region: r.providerData.GetRegionWithOverride(model.Region), - userId: model.UserId.ValueInt64(), +// ImportState imports a resource into the Terraform state on success. +// The expected import identifier format is: [project_id],[region],[instance_id],[database_id] +func (r *userResource) ImportState( + ctx context.Context, + req resource.ImportStateRequest, + resp *resource.ImportStateResponse, +) { + + ctx = core.InitProviderContext(ctx) + + if req.ID != "" { + + idParts := strings.Split(req.ID, core.Separator) + + if len(idParts) != 4 || idParts[0] == "" || idParts[1] == "" || idParts[2] == "" || idParts[3] == "" { + core.LogAndAddError( + ctx, &resp.Diagnostics, + "Error importing user", + fmt.Sprintf( + "Expected import identifier with format [project_id],[region],[instance_id],[user_id], got %q", + req.ID, + ), + ) + return + } + + userId, err := strconv.ParseInt(idParts[3], 10, 64) + if err != nil { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + "Error importing user", + fmt.Sprintf("Invalid user_id format: %q. It must be a valid integer.", idParts[3]), + ) + return + } + + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), idParts[0])...) + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), idParts[1])...) + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), idParts[2])...) + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("user_id"), userId)...) + + tflog.Info(ctx, "Postgres Flex user state imported") + + return } + + // If no ID is provided, attempt to read identity attributes from the import configuration + var identityData UserResourceIdentityModel + resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...) + if resp.Diagnostics.HasError() { + return + } + + projectId := identityData.ProjectID.ValueString() + region := identityData.Region.ValueString() + instanceId := identityData.InstanceID.ValueString() + userId := identityData.UserID.ValueInt64() + + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), projectId)...) + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), region)...) + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), instanceId)...) + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("user_id"), userId)...) + + tflog.Info(ctx, "Postgres Flex user state imported") +} + +// extractIdentityData extracts essential identifiers from the resource model, falling back to the identity model. +func (r *userResource) extractIdentityData( + model resourceModel, + identity UserResourceIdentityModel, +) (*clientArg, error) { + + var projectId, region, instanceId string + var userId int64 + + if !model.UserId.IsNull() && !model.UserId.IsUnknown() { + userId = model.UserId.ValueInt64() + } else { + if identity.UserID.IsNull() || identity.UserID.IsUnknown() { + return nil, fmt.Errorf("user_id not found in config") + } + userId = identity.UserID.ValueInt64() + } + + if !model.ProjectId.IsNull() && !model.ProjectId.IsUnknown() { + projectId = model.ProjectId.ValueString() + } else { + if identity.ProjectID.IsNull() || identity.ProjectID.IsUnknown() { + return nil, fmt.Errorf("project_id not found in config") + } + projectId = identity.ProjectID.ValueString() + } + + if !model.Region.IsNull() && !model.Region.IsUnknown() { + region = r.providerData.GetRegionWithOverride(model.Region) + } else { + if identity.Region.IsNull() || identity.Region.IsUnknown() { + return nil, fmt.Errorf("region not found in config") + } + region = r.providerData.GetRegionWithOverride(identity.Region) + } + + if !model.InstanceId.IsNull() && !model.InstanceId.IsUnknown() { + instanceId = model.InstanceId.ValueString() + } else { + if identity.InstanceID.IsNull() || identity.InstanceID.IsUnknown() { + return nil, fmt.Errorf("instance_id not found in config") + } + instanceId = identity.InstanceID.ValueString() + } + return &clientArg{ + projectId: projectId, + instanceId: instanceId, + region: region, + userId: userId, + }, nil } // setTFLogFields adds relevant fields to the context for terraform logging purposes. -func (r *userResource) setTFLogFields(ctx context.Context, model *Model) context.Context { - usrCtx := r.getClientArg(model) - - ctx = tflog.SetField(ctx, "project_id", usrCtx.projectId) - ctx = tflog.SetField(ctx, "instance_id", usrCtx.instanceId) - ctx = tflog.SetField(ctx, "user_id", usrCtx.userId) - ctx = tflog.SetField(ctx, "region", usrCtx.region) +func (r *userResource) setTFLogFields(ctx context.Context, arg *clientArg) context.Context { + ctx = tflog.SetField(ctx, "project_id", arg.projectId) + ctx = tflog.SetField(ctx, "instance_id", arg.instanceId) + ctx = tflog.SetField(ctx, "region", arg.region) + ctx = tflog.SetField(ctx, "user_id", arg.userId) return ctx } -func (r *userResource) expandRoles(ctx context.Context, rolesSet types.Set, diags *diag.Diagnostics) []string { +// expandRoles converts a Terraform list of roles to a string slice. +func (r *userResource) expandRoles(ctx context.Context, rolesSet types.List, diags *diag.Diagnostics) []string { if rolesSet.IsNull() || rolesSet.IsUnknown() { return nil } @@ -632,42 +716,3 @@ func (r *userResource) expandRoles(ctx context.Context, rolesSet types.Set, diag diags.Append(rolesSet.ElementsAs(ctx, &roles, false)...) return roles } - -func toCreatePayload(model *Model, roles *[]string) (*postgresflex.CreateUserRequestPayload, error) { - if model == nil { - return nil, fmt.Errorf("nil model") - } - if roles == nil { - return nil, fmt.Errorf("nil roles") - } - - return &postgresflex.CreateUserRequestPayload{ - Roles: toPayloadRoles(roles), - Name: conversion.StringValueToPointer(model.Username), - }, nil -} - -func toPayloadRoles(roles *[]string) *[]postgresflex.UserRole { - var userRoles = make([]postgresflex.UserRole, 0, len(*roles)) - for _, role := range *roles { - userRoles = append(userRoles, postgresflex.UserRole(role)) - } - return &userRoles -} - -func toUpdatePayload(model *Model, roles *[]string) ( - *postgresflex.UpdateUserRequestPayload, - error, -) { - if model == nil { - return nil, fmt.Errorf("nil model") - } - if roles == nil { - return nil, fmt.Errorf("nil roles") - } - - return &postgresflex.UpdateUserRequestPayload{ - Name: conversion.StringValueToPointer(model.Username), - Roles: toPayloadRoles(roles), - }, nil -} diff --git a/stackit/internal/services/postgresflexalpha/user/resource_test.go b/stackit/internal/services/postgresflexalpha/user/resource_test.go deleted file mode 100644 index e4a13482..00000000 --- a/stackit/internal/services/postgresflexalpha/user/resource_test.go +++ /dev/null @@ -1,448 +0,0 @@ -package postgresflexalpha - -import ( - "testing" - - "github.com/google/go-cmp/cmp" - "github.com/hashicorp/terraform-plugin-framework/attr" - "github.com/hashicorp/terraform-plugin-framework/types" - "github.com/stackitcloud/stackit-sdk-go/core/utils" - "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha" -) - -func TestMapFieldsCreate(t *testing.T) { - const testRegion = "region" - tests := []struct { - description string - input *postgresflexalpha.GetUserResponse - region string - expected Model - isValid bool - }{ - { - "default_values", - &postgresflexalpha.GetUserResponse{ - Id: utils.Ptr(int64(1)), - }, - testRegion, - Model{ - Id: types.StringValue("pid,region,iid,1"), - UserId: types.Int64Value(1), - InstanceId: types.StringValue("iid"), - ProjectId: types.StringValue("pid"), - Username: types.StringNull(), - Roles: types.SetNull(types.StringType), - Password: types.StringNull(), - Host: types.StringNull(), - Port: types.Int64Null(), - Region: types.StringValue(testRegion), - Status: types.StringNull(), - ConnectionString: types.StringNull(), - }, - true, - }, - { - "simple_values", - &postgresflexalpha.GetUserResponse{ - Id: utils.Ptr(int64(1)), - Name: utils.Ptr("username"), - ConnectionString: utils.Ptr("connection_string"), - Status: utils.Ptr("status"), - }, - testRegion, - Model{ - Id: types.StringValue("pid,region,iid,1"), - UserId: types.Int64Value(1), - InstanceId: types.StringValue("iid"), - ProjectId: types.StringValue("pid"), - Username: types.StringValue("username"), - Roles: types.SetNull(types.StringType), - Password: types.StringNull(), - Host: types.StringNull(), - Port: types.Int64Null(), - Region: types.StringValue(testRegion), - Status: types.StringValue("status"), - ConnectionString: types.StringValue("connection_string"), - }, - true, - }, - { - "null_fields_and_int_conversions", - &postgresflexalpha.GetUserResponse{ - Id: utils.Ptr(int64(1)), - Name: nil, - ConnectionString: nil, - Status: nil, - }, - testRegion, - Model{ - Id: types.StringValue("pid,region,iid,1"), - UserId: types.Int64Value(1), - InstanceId: types.StringValue("iid"), - ProjectId: types.StringValue("pid"), - Username: types.StringNull(), - Roles: types.SetNull(types.StringType), - Password: types.StringNull(), - Host: types.StringNull(), - Port: types.Int64Null(), - Region: types.StringValue(testRegion), - Status: types.StringNull(), - ConnectionString: types.StringNull(), - }, - true, - }, - { - "nil_response", - nil, - testRegion, - Model{}, - false, - }, - { - "nil_response_2", - &postgresflexalpha.GetUserResponse{}, - testRegion, - Model{}, - false, - }, - { - "no_resource_id", - &postgresflexalpha.GetUserResponse{}, - testRegion, - Model{}, - false, - }, - } - for _, tt := range tests { - t.Run( - tt.description, func(t *testing.T) { - state := &Model{ - ProjectId: tt.expected.ProjectId, - InstanceId: tt.expected.InstanceId, - } - - err := mapFields(tt.input, state, tt.region) - if !tt.isValid && err == nil { - t.Fatalf("Should have failed") - } - if tt.isValid && err != nil { - t.Fatalf("Should not have failed: %v", err) - } - if tt.isValid { - diff := cmp.Diff(state, &tt.expected) - if diff != "" { - t.Fatalf("Data does not match: %s", diff) - } - } - }, - ) - } -} - -func TestMapFields(t *testing.T) { - const testRegion = "region" - tests := []struct { - description string - input *postgresflexalpha.GetUserResponse - region string - expected Model - isValid bool - }{ - { - "default_values", - &postgresflexalpha.GetUserResponse{}, - testRegion, - Model{ - Id: types.StringValue("pid,region,iid,1"), - UserId: types.Int64Value(int64(1)), - InstanceId: types.StringValue("iid"), - ProjectId: types.StringValue("pid"), - Username: types.StringNull(), - Roles: types.SetNull(types.StringType), - Host: types.StringNull(), - Port: types.Int64Null(), - Region: types.StringValue(testRegion), - Status: types.StringNull(), - ConnectionString: types.StringNull(), - }, - true, - }, - { - "simple_values", - &postgresflexalpha.GetUserResponse{ - Roles: &[]postgresflexalpha.UserRole{ - "role_1", - "role_2", - "", - }, - Name: utils.Ptr("username"), - Host: utils.Ptr("host"), - Port: utils.Ptr(int64(1234)), - }, - testRegion, - Model{ - Id: types.StringValue("pid,region,iid,1"), - UserId: types.Int64Value(1), - InstanceId: types.StringValue("iid"), - ProjectId: types.StringValue("pid"), - Username: types.StringValue("username"), - Roles: types.SetValueMust( - types.StringType, []attr.Value{ - types.StringValue("role_1"), - types.StringValue("role_2"), - types.StringValue(""), - }, - ), - Host: types.StringValue("host"), - Port: types.Int64Value(1234), - Region: types.StringValue(testRegion), - Status: types.StringNull(), - ConnectionString: types.StringNull(), - }, - true, - }, - { - "null_fields_and_int_conversions", - &postgresflexalpha.GetUserResponse{ - Id: utils.Ptr(int64(1)), - Name: nil, - Host: nil, - Port: utils.Ptr(int64(2123456789)), - }, - testRegion, - Model{ - Id: types.StringValue("pid,region,iid,1"), - UserId: types.Int64Value(1), - InstanceId: types.StringValue("iid"), - ProjectId: types.StringValue("pid"), - Username: types.StringNull(), - Roles: types.SetNull(types.StringType), - Host: types.StringNull(), - Port: types.Int64Value(2123456789), - Region: types.StringValue(testRegion), - Status: types.StringNull(), - ConnectionString: types.StringNull(), - }, - true, - }, - { - "nil_response", - nil, - testRegion, - Model{}, - false, - }, - { - "nil_response_2", - &postgresflexalpha.GetUserResponse{}, - testRegion, - Model{}, - false, - }, - { - "no_resource_id", - &postgresflexalpha.GetUserResponse{}, - testRegion, - Model{}, - false, - }, - } - for _, tt := range tests { - t.Run( - tt.description, func(t *testing.T) { - state := &Model{ - ProjectId: tt.expected.ProjectId, - InstanceId: tt.expected.InstanceId, - UserId: tt.expected.UserId, - } - err := mapFields(tt.input, state, tt.region) - if !tt.isValid && err == nil { - t.Fatalf("Should have failed") - } - if tt.isValid && err != nil { - t.Fatalf("Should not have failed: %v", err) - } - if tt.isValid { - diff := cmp.Diff(state, &tt.expected) - if diff != "" { - t.Fatalf("Data does not match: %s", diff) - } - } - }, - ) - } -} - -func TestToCreatePayload(t *testing.T) { - tests := []struct { - description string - input *Model - inputRoles *[]string - expected *postgresflexalpha.CreateUserRequestPayload - isValid bool - }{ - { - "default_values", - &Model{}, - &[]string{}, - &postgresflexalpha.CreateUserRequestPayload{ - Name: nil, - Roles: &[]postgresflexalpha.UserRole{}, - }, - true, - }, - { - "simple_values", - &Model{ - Username: types.StringValue("username"), - }, - &[]string{ - "role_1", - "role_2", - }, - &postgresflexalpha.CreateUserRequestPayload{ - Name: utils.Ptr("username"), - Roles: &[]postgresflexalpha.UserRole{ - "role_1", - "role_2", - }, - }, - true, - }, - { - "null_fields_and_int_conversions", - &Model{ - Username: types.StringNull(), - }, - &[]string{ - "", - }, - &postgresflexalpha.CreateUserRequestPayload{ - Roles: &[]postgresflexalpha.UserRole{ - "", - }, - Name: nil, - }, - true, - }, - { - "nil_model", - nil, - &[]string{}, - nil, - false, - }, - { - "nil_roles", - &Model{}, - nil, - nil, - false, - }, - } - for _, tt := range tests { - t.Run( - tt.description, func(t *testing.T) { - output, err := toCreatePayload(tt.input, tt.inputRoles) - if !tt.isValid && err == nil { - t.Fatalf("Should have failed") - } - if tt.isValid && err != nil { - t.Fatalf("Should not have failed: %v", err) - } - if tt.isValid { - diff := cmp.Diff(output, tt.expected) - if diff != "" { - t.Fatalf("Data does not match: %s", diff) - } - } - }, - ) - } -} - -func TestToUpdatePayload(t *testing.T) { - tests := []struct { - description string - input *Model - inputRoles *[]string - expected *postgresflexalpha.UpdateUserRequestPayload - isValid bool - }{ - { - "default_values", - &Model{}, - &[]string{}, - &postgresflexalpha.UpdateUserRequestPayload{ - Roles: &[]postgresflexalpha.UserRole{}, - }, - true, - }, - { - "default_values", - &Model{ - Username: types.StringValue("username"), - }, - &[]string{ - "role_1", - "role_2", - }, - &postgresflexalpha.UpdateUserRequestPayload{ - Name: utils.Ptr("username"), - Roles: &[]postgresflexalpha.UserRole{ - "role_1", - "role_2", - }, - }, - true, - }, - { - "null_fields_and_int_conversions", - &Model{ - Username: types.StringNull(), - }, - &[]string{ - "", - }, - &postgresflexalpha.UpdateUserRequestPayload{ - Roles: &[]postgresflexalpha.UserRole{ - "", - }, - }, - true, - }, - { - "nil_model", - nil, - &[]string{}, - nil, - false, - }, - { - "nil_roles", - &Model{}, - nil, - nil, - false, - }, - } - for _, tt := range tests { - t.Run( - tt.description, func(t *testing.T) { - output, err := toUpdatePayload(tt.input, tt.inputRoles) - if !tt.isValid && err == nil { - t.Fatalf("Should have failed") - } - if tt.isValid && err != nil { - t.Fatalf("Should not have failed: %v", err) - } - if tt.isValid { - diff := cmp.Diff(output, tt.expected) - if diff != "" { - t.Fatalf("Data does not match: %s", diff) - } - } - }, - ) - } -} diff --git a/stackit/internal/services/postgresflexalpha/user/resources_gen/user_resource_gen.go b/stackit/internal/services/postgresflexalpha/user/resources_gen/user_resource_gen.go index 9734c2a9..f07ab701 100644 --- a/stackit/internal/services/postgresflexalpha/user/resources_gen/user_resource_gen.go +++ b/stackit/internal/services/postgresflexalpha/user/resources_gen/user_resource_gen.go @@ -19,11 +19,6 @@ func UserResourceSchema(ctx context.Context) schema.Schema { Description: "The connection string for the user to the instance.", MarkdownDescription: "The connection string for the user to the instance.", }, - "host": schema.StringAttribute{ - Computed: true, - Description: "The host of the instance in which the user belongs to.", - MarkdownDescription: "The host of the instance in which the user belongs to.", - }, "id": schema.Int64Attribute{ Computed: true, Description: "The ID of the user.", @@ -45,11 +40,6 @@ func UserResourceSchema(ctx context.Context) schema.Schema { Description: "The password for the user.", MarkdownDescription: "The password for the user.", }, - "port": schema.Int64Attribute{ - Computed: true, - Description: "The port of the instance in which the user belongs to.", - MarkdownDescription: "The port of the instance in which the user belongs to.", - }, "project_id": schema.StringAttribute{ Optional: true, Computed: true, @@ -91,12 +81,10 @@ func UserResourceSchema(ctx context.Context) schema.Schema { type UserModel struct { ConnectionString types.String `tfsdk:"connection_string"` - Host types.String `tfsdk:"host"` Id types.Int64 `tfsdk:"id"` InstanceId types.String `tfsdk:"instance_id"` Name types.String `tfsdk:"name"` Password types.String `tfsdk:"password"` - Port types.Int64 `tfsdk:"port"` ProjectId types.String `tfsdk:"project_id"` Region types.String `tfsdk:"region"` Roles types.List `tfsdk:"roles"` diff --git a/stackit/internal/services/sqlserverflexalpha/database/datasource.go b/stackit/internal/services/sqlserverflexalpha/database/datasource.go index cd796159..3c201b5a 100644 --- a/stackit/internal/services/sqlserverflexalpha/database/datasource.go +++ b/stackit/internal/services/sqlserverflexalpha/database/datasource.go @@ -4,6 +4,8 @@ import ( "context" "github.com/hashicorp/terraform-plugin-framework/datasource" + "github.com/hashicorp/terraform-plugin-framework/datasource/schema" + "github.com/hashicorp/terraform-plugin-framework/types" "github.com/hashicorp/terraform-plugin-log/tflog" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" @@ -12,6 +14,12 @@ import ( sqlserverflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/utils" ) +// dataSourceModel maps the data source schema data. +type dataSourceModel struct { + sqlserverflexalphaGen.DatabaseModel + TerraformID types.String `tfsdk:"id"` +} + var _ datasource.DataSource = (*databaseDataSource)(nil) func NewDatabaseDataSource() datasource.DataSource { @@ -23,16 +31,31 @@ type databaseDataSource struct { providerData core.ProviderData } -func (d *databaseDataSource) Metadata(_ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) { +func (d *databaseDataSource) Metadata( + _ context.Context, + req datasource.MetadataRequest, + resp *datasource.MetadataResponse, +) { resp.TypeName = req.ProviderTypeName + "_sqlserverflexalpha_database" } func (d *databaseDataSource) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) { - resp.Schema = sqlserverflexalphaGen.DatabaseDataSourceSchema(ctx) + s := sqlserverflexalphaGen.DatabaseDataSourceSchema(ctx) + s.Attributes["id"] = schema.StringAttribute{ + Description: "Terraform's internal resource ID. It is structured as \\\"`project_id`,`region`,`instance_id`," + + "`database_id`\\\".\",", + Computed: true, + } + + resp.Schema = s } // Configure adds the provider configured client to the data source. -func (d *databaseDataSource) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) { +func (d *databaseDataSource) Configure( + ctx context.Context, + req datasource.ConfigureRequest, + resp *datasource.ConfigureResponse, +) { var ok bool d.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics) if !ok { @@ -48,7 +71,7 @@ func (d *databaseDataSource) Configure(ctx context.Context, req datasource.Confi } func (d *databaseDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { - var data sqlserverflexalphaGen.DatabaseModel + var data dataSourceModel // Read Terraform configuration data into the model resp.Diagnostics.Append(req.Config.Get(ctx, &data)...) diff --git a/stackit/internal/services/sqlserverflexalpha/database/datasources_gen/database_data_source_gen.go b/stackit/internal/services/sqlserverflexalpha/database/datasources_gen/database_data_source_gen.go index 25406f5f..82250802 100644 --- a/stackit/internal/services/sqlserverflexalpha/database/datasources_gen/database_data_source_gen.go +++ b/stackit/internal/services/sqlserverflexalpha/database/datasources_gen/database_data_source_gen.go @@ -29,7 +29,7 @@ func DatabaseDataSourceSchema(ctx context.Context) schema.Schema { Description: "The name of the database.", MarkdownDescription: "The name of the database.", }, - "id": schema.Int64Attribute{ + "tf_original_api_id": schema.Int64Attribute{ Computed: true, Description: "The id of the database.", MarkdownDescription: "The id of the database.", @@ -72,7 +72,7 @@ type DatabaseModel struct { CollationName types.String `tfsdk:"collation_name"` CompatibilityLevel types.Int64 `tfsdk:"compatibility_level"` DatabaseName types.String `tfsdk:"database_name"` - Id types.Int64 `tfsdk:"id"` + Id types.Int64 `tfsdk:"tf_original_api_id"` InstanceId types.String `tfsdk:"instance_id"` Name types.String `tfsdk:"name"` Owner types.String `tfsdk:"owner"` diff --git a/stackit/internal/services/sqlserverflexalpha/database/planModifiers.yaml b/stackit/internal/services/sqlserverflexalpha/database/planModifiers.yaml new file mode 100644 index 00000000..d6209230 --- /dev/null +++ b/stackit/internal/services/sqlserverflexalpha/database/planModifiers.yaml @@ -0,0 +1,50 @@ +fields: + - name: 'id' + modifiers: + - 'UseStateForUnknown' + + - name: 'instance_id' + validators: + - validate.NoSeparator + - validate.UUID + modifiers: + - 'RequiresReplace' + + - name: 'project_id' + validators: + - validate.NoSeparator + - validate.UUID + modifiers: + - 'RequiresReplace' + + - name: 'region' + modifiers: + - 'RequiresReplace' + + - name: 'name' + modifiers: + - 'RequiresReplace' + + - name: 'collation' + modifiers: + - 'RequiresReplace' + + - name: 'owner' + modifiers: + - 'RequiresReplace' + + - name: 'database_name' + modifiers: + - 'UseStateForUnknown' + + - name: 'collation_name' + modifiers: + - 'UseStateForUnknown' + + - name: 'compatibility' + modifiers: + - 'RequiresReplace' + + - name: 'compatibility_level' + modifiers: + - 'UseStateForUnknown' diff --git a/stackit/internal/services/sqlserverflexalpha/database/resource.go b/stackit/internal/services/sqlserverflexalpha/database/resource.go index 52866a9c..f3dc6816 100644 --- a/stackit/internal/services/sqlserverflexalpha/database/resource.go +++ b/stackit/internal/services/sqlserverflexalpha/database/resource.go @@ -2,11 +2,15 @@ package sqlserverflexalpha import ( "context" + _ "embed" "fmt" + "strconv" "strings" "github.com/hashicorp/terraform-plugin-framework/path" "github.com/hashicorp/terraform-plugin-framework/resource" + "github.com/hashicorp/terraform-plugin-framework/resource/identityschema" + "github.com/hashicorp/terraform-plugin-framework/types" "github.com/hashicorp/terraform-plugin-log/tflog" "github.com/stackitcloud/stackit-sdk-go/core/config" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha" @@ -23,12 +27,24 @@ var ( _ resource.ResourceWithConfigure = &databaseResource{} _ resource.ResourceWithImportState = &databaseResource{} _ resource.ResourceWithModifyPlan = &databaseResource{} + _ resource.ResourceWithIdentity = &databaseResource{} ) func NewDatabaseResource() resource.Resource { return &databaseResource{} } +// resourceModel describes the resource data model. +type resourceModel = sqlserverflexalphaGen.DatabaseModel + +// DatabaseResourceIdentityModel describes the resource's identity attributes. +type DatabaseResourceIdentityModel struct { + ProjectID types.String `tfsdk:"project_id"` + Region types.String `tfsdk:"region"` + InstanceID types.String `tfsdk:"instance_id"` + DatabaseName types.String `tfsdk:"database_name"` +} + type databaseResource struct { client *sqlserverflexalpha.APIClient providerData core.ProviderData @@ -38,8 +54,47 @@ func (r *databaseResource) Metadata(_ context.Context, req resource.MetadataRequ resp.TypeName = req.ProviderTypeName + "_sqlserverflexalpha_database" } +//go:embed planModifiers.yaml +var modifiersFileByte []byte + func (r *databaseResource) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) { - resp.Schema = sqlserverflexalphaGen.DatabaseResourceSchema(ctx) + s := sqlserverflexalphaGen.DatabaseResourceSchema(ctx) + + fields, err := utils.ReadModifiersConfig(modifiersFileByte) + if err != nil { + resp.Diagnostics.AddError("error during read modifiers config file", err.Error()) + return + } + + err = utils.AddPlanModifiersToResourceSchema(fields, &s) + if err != nil { + resp.Diagnostics.AddError("error adding plan modifiers", err.Error()) + return + } + resp.Schema = s +} + +func (r *databaseResource) IdentitySchema( + _ context.Context, + _ resource.IdentitySchemaRequest, + resp *resource.IdentitySchemaResponse, +) { + resp.IdentitySchema = identityschema.Schema{ + Attributes: map[string]identityschema.Attribute{ + "project_id": identityschema.StringAttribute{ + RequiredForImport: true, // must be set during import by the practitioner + }, + "region": identityschema.StringAttribute{ + RequiredForImport: true, // can be defaulted by the provider configuration + }, + "instance_id": identityschema.StringAttribute{ + RequiredForImport: true, // can be defaulted by the provider configuration + }, + "database_name": identityschema.StringAttribute{ + RequiredForImport: true, // can be defaulted by the provider configuration + }, + }, + } } // Configure adds the provider configured client to the resource. @@ -59,7 +114,10 @@ func (r *databaseResource) Configure( utils.UserAgentConfigOption(r.providerData.Version), } if r.providerData.PostgresFlexCustomEndpoint != "" { - apiClientConfigOptions = append(apiClientConfigOptions, config.WithEndpoint(r.providerData.PostgresFlexCustomEndpoint)) + apiClientConfigOptions = append( + apiClientConfigOptions, + config.WithEndpoint(r.providerData.PostgresFlexCustomEndpoint), + ) } else { apiClientConfigOptions = append(apiClientConfigOptions, config.WithRegion(r.providerData.GetRegion())) } @@ -67,7 +125,10 @@ func (r *databaseResource) Configure( if err != nil { resp.Diagnostics.AddError( "Error configuring API client", - fmt.Sprintf("Configuring client: %v. This is an error related to the provider configuration, not to the resource configuration", err), + fmt.Sprintf( + "Configuring client: %v. This is an error related to the provider configuration, not to the resource configuration", + err, + ), ) return } @@ -97,7 +158,7 @@ func (r *databaseResource) Create(ctx context.Context, req resource.CreateReques } func (r *databaseResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) { - var data sqlserverflexalphaGen.DatabaseModel + var data resourceModel // Read Terraform prior state data into the model resp.Diagnostics.Append(req.State.Get(ctx, &data)...) @@ -115,7 +176,7 @@ func (r *databaseResource) Read(ctx context.Context, req resource.ReadRequest, r } func (r *databaseResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) { - var data sqlserverflexalphaGen.DatabaseModel + var data resourceModel // Read Terraform plan data into the model resp.Diagnostics.Append(req.Plan.Get(ctx, &data)...) @@ -182,36 +243,72 @@ func (r *databaseResource) ModifyPlan( } // ImportState imports a resource into the Terraform state on success. -// The expected format of the resource import identifier is: project_id,zone_id,record_set_id +// The expected import identifier format is: [project_id],[region],[instance_id],[database_id] func (r *databaseResource) ImportState( ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse, ) { - idParts := strings.Split(req.ID, core.Separator) - // Todo: Import logic - if len(idParts) < 2 || idParts[0] == "" || idParts[1] == "" { - core.LogAndAddError( - ctx, &resp.Diagnostics, - "Error importing database", - fmt.Sprintf( - "Expected import identifier with format [project_id],[region],..., got %q", - req.ID, - ), + ctx = core.InitProviderContext(ctx) + + if req.ID != "" { + + idParts := strings.Split(req.ID, core.Separator) + + if len(idParts) != 4 || idParts[0] == "" || idParts[1] == "" || idParts[2] == "" || idParts[3] == "" { + core.LogAndAddError( + ctx, &resp.Diagnostics, + "Error importing database", + fmt.Sprintf( + "Expected import identifier with format [project_id],[region],[instance_id],[database_name], got %q", + req.ID, + ), + ) + return + } + + databaseId, err := strconv.ParseInt(idParts[3], 10, 64) + if err != nil { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + "Error importing database", + fmt.Sprintf("Invalid database_id format: %q. It must be a valid integer.", idParts[3]), + ) + return + } + + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), idParts[0])...) + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), idParts[1])...) + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), idParts[2])...) + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("database_name"), databaseId)...) + + core.LogAndAddWarning( + ctx, + &resp.Diagnostics, + "Sqlserverflexalpha database imported with empty password", + "The database password is not imported as it is only available upon creation of a new database. The password field will be empty.", ) + tflog.Info(ctx, "Sqlserverflexalpha database state imported") + } + + // If no ID is provided, attempt to read identity attributes from the import configuration + var identityData DatabaseResourceIdentityModel + resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...) + if resp.Diagnostics.HasError() { return } - resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), idParts[0])...) - resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), idParts[1])...) - // ... more ... + projectId := identityData.ProjectID.ValueString() + region := identityData.Region.ValueString() + instanceId := identityData.InstanceID.ValueString() + databaseName := identityData.DatabaseName.ValueString() + + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), projectId)...) + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), region)...) + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), instanceId)...) + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("database_name"), databaseName)...) - core.LogAndAddWarning( - ctx, - &resp.Diagnostics, - "Sqlserverflexalpha database imported with empty password", - "The database password is not imported as it is only available upon creation of a new database. The password field will be empty.", - ) tflog.Info(ctx, "Sqlserverflexalpha database state imported") } diff --git a/stackit/internal/services/sqlserverflexalpha/flavor/datasource.go b/stackit/internal/services/sqlserverflexalpha/flavor/datasource.go index 1deb2beb..a48e7572 100644 --- a/stackit/internal/services/sqlserverflexalpha/flavor/datasource.go +++ b/stackit/internal/services/sqlserverflexalpha/flavor/datasource.go @@ -26,6 +26,11 @@ var ( _ datasource.DataSourceWithConfigure = &flavorDataSource{} ) +// NewFlavorDataSource is a helper function to simplify the provider implementation. +func NewFlavorDataSource() datasource.DataSource { + return &flavorDataSource{} +} + type FlavorModel struct { ProjectId types.String `tfsdk:"project_id"` Region types.String `tfsdk:"region"` @@ -41,11 +46,6 @@ type FlavorModel struct { StorageClasses types.List `tfsdk:"storage_classes"` } -// NewFlavorDataSource is a helper function to simplify the provider implementation. -func NewFlavorDataSource() datasource.DataSource { - return &flavorDataSource{} -} - // flavorDataSource is the data source implementation. type flavorDataSource struct { client *sqlserverflexalpha.APIClient @@ -53,12 +53,20 @@ type flavorDataSource struct { } // Metadata returns the data source type name. -func (r *flavorDataSource) Metadata(_ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) { +func (r *flavorDataSource) Metadata( + _ context.Context, + req datasource.MetadataRequest, + resp *datasource.MetadataResponse, +) { resp.TypeName = req.ProviderTypeName + "_sqlserverflexalpha_flavor" } // Configure adds the provider configured client to the data source. -func (r *flavorDataSource) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) { +func (r *flavorDataSource) Configure( + ctx context.Context, + req datasource.ConfigureRequest, + resp *datasource.ConfigureResponse, +) { var ok bool r.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics) if !ok { @@ -212,11 +220,13 @@ func (r *flavorDataSource) Read(ctx context.Context, req datasource.ReadRequest, model.MinGb = types.Int64Value(*f.MinGB) if f.StorageClasses == nil { - model.StorageClasses = types.ListNull(sqlserverflexalphaGen.StorageClassesType{ - ObjectType: basetypes.ObjectType{ - AttrTypes: sqlserverflexalphaGen.StorageClassesValue{}.AttributeTypes(ctx), + model.StorageClasses = types.ListNull( + sqlserverflexalphaGen.StorageClassesType{ + ObjectType: basetypes.ObjectType{ + AttrTypes: sqlserverflexalphaGen.StorageClassesValue{}.AttributeTypes(ctx), + }, }, - }) + ) } else { var scList []attr.Value for _, sc := range *f.StorageClasses { diff --git a/stackit/internal/services/sqlserverflexalpha/flavors/datasource.go b/stackit/internal/services/sqlserverflexalpha/flavors/datasource.go index 27609fc5..c1d4de36 100644 --- a/stackit/internal/services/sqlserverflexalpha/flavors/datasource.go +++ b/stackit/internal/services/sqlserverflexalpha/flavors/datasource.go @@ -13,8 +13,12 @@ import ( sqlserverflexalphaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/flavors/datasources_gen" ) +// dataSourceModel maps the data source schema data. +type dataSourceModel = sqlserverflexalphaGen.FlavorsModel + var _ datasource.DataSource = (*flavorsDataSource)(nil) +// TODO: Use NewFlavorsDataSource when datasource is implemented func NewFlavorsDataSource() datasource.DataSource { return &flavorsDataSource{} } @@ -24,7 +28,11 @@ type flavorsDataSource struct { providerData core.ProviderData } -func (d *flavorsDataSource) Metadata(_ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) { +func (d *flavorsDataSource) Metadata( + _ context.Context, + req datasource.MetadataRequest, + resp *datasource.MetadataResponse, +) { resp.TypeName = req.ProviderTypeName + "_sqlserverflexalpha_flavors" } @@ -33,7 +41,11 @@ func (d *flavorsDataSource) Schema(ctx context.Context, _ datasource.SchemaReque } // Configure adds the provider configured client to the data source. -func (d *flavorsDataSource) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) { +func (d *flavorsDataSource) Configure( + ctx context.Context, + req datasource.ConfigureRequest, + resp *datasource.ConfigureResponse, +) { var ok bool d.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics) if !ok { @@ -49,7 +61,7 @@ func (d *flavorsDataSource) Configure(ctx context.Context, req datasource.Config } func (d *flavorsDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { - var data sqlserverflexalphaGen.FlavorsModel + var data dataSourceModel // Read Terraform configuration data into the model resp.Diagnostics.Append(req.Config.Get(ctx, &data)...) diff --git a/stackit/internal/services/sqlserverflexalpha/flavors/datasources_gen/flavors_data_source_gen.go b/stackit/internal/services/sqlserverflexalpha/flavors/datasources_gen/flavors_data_source_gen.go index 43ac64f5..40f086e2 100644 --- a/stackit/internal/services/sqlserverflexalpha/flavors/datasources_gen/flavors_data_source_gen.go +++ b/stackit/internal/services/sqlserverflexalpha/flavors/datasources_gen/flavors_data_source_gen.go @@ -33,7 +33,7 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema { Description: "The flavor description.", MarkdownDescription: "The flavor description.", }, - "id": schema.StringAttribute{ + "tf_original_api_id": schema.StringAttribute{ Computed: true, Description: "The id of the instance flavor.", MarkdownDescription: "The id of the instance flavor.", diff --git a/stackit/internal/services/sqlserverflexalpha/instance/datasource.go b/stackit/internal/services/sqlserverflexalpha/instance/datasource.go index 3f8f787e..0d58140c 100644 --- a/stackit/internal/services/sqlserverflexalpha/instance/datasource.go +++ b/stackit/internal/services/sqlserverflexalpha/instance/datasource.go @@ -7,6 +7,8 @@ import ( "fmt" "net/http" + "github.com/hashicorp/terraform-plugin-framework/datasource/schema" + "github.com/hashicorp/terraform-plugin-framework/types" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" sqlserverflexalpha "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/instance/datasources_gen" sqlserverflexalpha2 "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/instance/resources_gen" @@ -20,6 +22,12 @@ import ( "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" ) +// dataSourceModel maps the data source schema data. +type dataSourceModel struct { + sqlserverflexalpha2.InstanceModel + TerraformID types.String `tfsdk:"id"` +} + // Ensure the implementation satisfies the expected interfaces. var ( _ datasource.DataSource = &instanceDataSource{} @@ -37,12 +45,20 @@ type instanceDataSource struct { } // Metadata returns the data source type name. -func (r *instanceDataSource) Metadata(_ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) { +func (r *instanceDataSource) Metadata( + _ context.Context, + req datasource.MetadataRequest, + resp *datasource.MetadataResponse, +) { resp.TypeName = req.ProviderTypeName + "_sqlserverflexalpha_instance" } // Configure adds the provider configured client to the data source. -func (r *instanceDataSource) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) { +func (r *instanceDataSource) Configure( + ctx context.Context, + req datasource.ConfigureRequest, + resp *datasource.ConfigureResponse, +) { var ok bool r.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics) if !ok { @@ -59,167 +75,22 @@ func (r *instanceDataSource) Configure(ctx context.Context, req datasource.Confi // Schema defines the schema for the data source. func (r *instanceDataSource) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) { - //descriptions := map[string]string{ - // "main": "SQLServer Flex ALPHA instance resource schema. Must have a `region` specified in the provider configuration.", - // "id": "Terraform's internal resource ID. It is structured as \"`project_id`,`region`,`instance_id`\".", - // "instance_id": "ID of the SQLServer Flex instance.", - // "project_id": "STACKIT project ID to which the instance is associated.", - // "name": "Instance name.", - // "access_scope": "The access scope of the instance. (e.g. SNA)", - // "acl": "The Access Control List (ACL) for the SQLServer Flex instance.", - // "backup_schedule": `The backup schedule. Should follow the cron scheduling system format (e.g. "0 0 * * *")`, - // "region": "The resource region. If not defined, the provider region is used.", - // "encryption": "The encryption block.", - // "network": "The network block.", - // "keyring_id": "STACKIT KMS - KeyRing ID of the encryption key to use.", - // "key_id": "STACKIT KMS - Key ID of the encryption key to use.", - // "key_version": "STACKIT KMS - Key version to use in the encryption key.", - // "service:account": "STACKIT KMS - service account to use in the encryption key.", - // "instance_address": "The returned instance IP address of the SQLServer Flex instance.", - // "router_address": "The returned router IP address of the SQLServer Flex instance.", - //} + s := sqlserverflexalpha.InstanceDataSourceSchema(ctx) + s.Attributes["id"] = schema.StringAttribute{ + Description: "Terraform's internal resource ID. It is structured as \\\"`project_id`,`region`,`instance_id`\\\".", + Computed: true, + } - resp.Schema = sqlserverflexalpha.InstanceDataSourceSchema(ctx) - - //resp.Schema = schema.Schema{ - // Description: descriptions["main"], - // Attributes: map[string]schema.Attribute{ - // "id": schema.StringAttribute{ - // Description: descriptions["id"], - // Computed: true, - // }, - // "instance_id": schema.StringAttribute{ - // Description: descriptions["instance_id"], - // Required: true, - // Validators: []validator.String{ - // validate.UUID(), - // validate.NoSeparator(), - // }, - // }, - // "project_id": schema.StringAttribute{ - // Description: descriptions["project_id"], - // Required: true, - // Validators: []validator.String{ - // validate.UUID(), - // validate.NoSeparator(), - // }, - // }, - // "name": schema.StringAttribute{ - // Description: descriptions["name"], - // Computed: true, - // }, - // "backup_schedule": schema.StringAttribute{ - // Description: descriptions["backup_schedule"], - // Computed: true, - // }, - // "is_deletable": schema.BoolAttribute{ - // Description: descriptions["is_deletable"], - // Computed: true, - // }, - // "flavor": schema.SingleNestedAttribute{ - // Computed: true, - // Attributes: map[string]schema.Attribute{ - // "id": schema.StringAttribute{ - // Computed: true, - // }, - // "description": schema.StringAttribute{ - // Computed: true, - // }, - // "cpu": schema.Int64Attribute{ - // Computed: true, - // }, - // "ram": schema.Int64Attribute{ - // Computed: true, - // }, - // "node_type": schema.StringAttribute{ - // Computed: true, - // }, - // }, - // }, - // "replicas": schema.Int64Attribute{ - // Computed: true, - // }, - // "storage": schema.SingleNestedAttribute{ - // Computed: true, - // Attributes: map[string]schema.Attribute{ - // "class": schema.StringAttribute{ - // Computed: true, - // }, - // "size": schema.Int64Attribute{ - // Computed: true, - // }, - // }, - // }, - // "version": schema.StringAttribute{ - // Computed: true, - // }, - // "status": schema.StringAttribute{ - // Computed: true, - // }, - // "edition": schema.StringAttribute{ - // Computed: true, - // }, - // "retention_days": schema.Int64Attribute{ - // Computed: true, - // }, - // "region": schema.StringAttribute{ - // // the region cannot be found, so it has to be passed - // Optional: true, - // Description: descriptions["region"], - // }, - // "encryption": schema.SingleNestedAttribute{ - // Computed: true, - // Attributes: map[string]schema.Attribute{ - // "key_id": schema.StringAttribute{ - // Description: descriptions["key_id"], - // Computed: true, - // }, - // "key_version": schema.StringAttribute{ - // Description: descriptions["key_version"], - // Computed: true, - // }, - // "keyring_id": schema.StringAttribute{ - // Description: descriptions["keyring_id"], - // Computed: true, - // }, - // "service_account": schema.StringAttribute{ - // Description: descriptions["service_account"], - // Computed: true, - // }, - // }, - // Description: descriptions["encryption"], - // }, - // "network": schema.SingleNestedAttribute{ - // Computed: true, - // Attributes: map[string]schema.Attribute{ - // "access_scope": schema.StringAttribute{ - // Description: descriptions["access_scope"], - // Computed: true, - // }, - // "instance_address": schema.StringAttribute{ - // Description: descriptions["instance_address"], - // Computed: true, - // }, - // "router_address": schema.StringAttribute{ - // Description: descriptions["router_address"], - // Computed: true, - // }, - // "acl": schema.ListAttribute{ - // Description: descriptions["acl"], - // ElementType: types.StringType, - // Computed: true, - // }, - // }, - // Description: descriptions["network"], - // }, - // }, - //} + resp.Schema = s } // Read refreshes the Terraform state with the latest data. -func (r *instanceDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { // nolint:gocritic // function signature required by Terraform - //var model sqlserverflexalpha2.InstanceModel - var model sqlserverflexalpha2.InstanceModel +func (r *instanceDataSource) Read( + ctx context.Context, + req datasource.ReadRequest, + resp *datasource.ReadResponse, +) { // nolint:gocritic // function signature required by Terraform + var model dataSourceModel diags := req.Config.Get(ctx, &model) resp.Diagnostics.Append(diags...) if resp.Diagnostics.HasError() { @@ -279,10 +150,15 @@ func (r *instanceDataSource) Read(ctx context.Context, req datasource.ReadReques // } //} - err = mapResponseToModel(ctx, instanceResp, &model, resp.Diagnostics) + err = mapFields(ctx, instanceResp, &model, resp.Diagnostics) //err = mapFields(ctx, instanceResp, &model, storage, encryption, network, region) if err != nil { - core.LogAndAddError(ctx, &resp.Diagnostics, "Error reading instance", fmt.Sprintf("Processing API payload: %v", err)) + core.LogAndAddError( + ctx, + &resp.Diagnostics, + "Error reading instance", + fmt.Sprintf("Processing API payload: %v", err), + ) return } // Set refreshed state diff --git a/stackit/internal/services/sqlserverflexalpha/instance/datasources_gen/instance_data_source_gen.go b/stackit/internal/services/sqlserverflexalpha/instance/datasources_gen/instance_data_source_gen.go index dcf7f6dd..5880a392 100644 --- a/stackit/internal/services/sqlserverflexalpha/instance/datasources_gen/instance_data_source_gen.go +++ b/stackit/internal/services/sqlserverflexalpha/instance/datasources_gen/instance_data_source_gen.go @@ -65,7 +65,7 @@ func InstanceDataSourceSchema(ctx context.Context) schema.Schema { Description: "The id of the instance flavor.", MarkdownDescription: "The id of the instance flavor.", }, - "id": schema.StringAttribute{ + "tf_original_api_id": schema.StringAttribute{ Computed: true, Description: "The ID of the instance.", MarkdownDescription: "The ID of the instance.", @@ -178,7 +178,7 @@ type InstanceModel struct { Edition types.String `tfsdk:"edition"` Encryption EncryptionValue `tfsdk:"encryption"` FlavorId types.String `tfsdk:"flavor_id"` - Id types.String `tfsdk:"id"` + Id types.String `tfsdk:"tf_original_api_id"` InstanceId types.String `tfsdk:"instance_id"` IsDeletable types.Bool `tfsdk:"is_deletable"` Name types.String `tfsdk:"name"` diff --git a/stackit/internal/services/sqlserverflexalpha/instance/functions.go b/stackit/internal/services/sqlserverflexalpha/instance/functions.go index 783d95e1..77effc6c 100644 --- a/stackit/internal/services/sqlserverflexalpha/instance/functions.go +++ b/stackit/internal/services/sqlserverflexalpha/instance/functions.go @@ -14,26 +14,21 @@ import ( sqlserverflexResGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/instance/resources_gen" ) -func mapResponseToModel( +// instanceModel is a type constraint for models that can be mapped from a GetInstanceResponse. +type instanceModel interface { + *dataSourceModel | *resourceModel +} + +func mapFields[T instanceModel]( ctx context.Context, resp *sqlserverflex.GetInstanceResponse, - m *sqlserverflexResGen.InstanceModel, + m T, tfDiags diag.Diagnostics, ) error { - m.BackupSchedule = types.StringValue(resp.GetBackupSchedule()) - m.Edition = types.StringValue(string(resp.GetEdition())) - m.Encryption = handleEncryption(m, resp) - m.FlavorId = types.StringValue(resp.GetFlavorId()) - m.Id = types.StringValue(resp.GetId()) - m.InstanceId = types.StringValue(resp.GetId()) - m.IsDeletable = types.BoolValue(resp.GetIsDeletable()) - m.Name = types.StringValue(resp.GetName()) netAcl, diags := types.ListValueFrom(ctx, types.StringType, resp.Network.GetAcl()) tfDiags.Append(diags...) if diags.HasError() { - return fmt.Errorf( - "error converting network acl response value", - ) + return fmt.Errorf("error converting network acl response value") } net, diags := sqlserverflexResGen.NewNetworkValue( sqlserverflexResGen.NetworkValue{}.AttributeTypes(ctx), @@ -46,22 +41,8 @@ func mapResponseToModel( ) tfDiags.Append(diags...) if diags.HasError() { - return fmt.Errorf( - "error converting network response value", - "access_scope", - types.StringValue(string(resp.Network.GetAccessScope())), - "acl", - netAcl, - "instance_address", - types.StringValue(resp.Network.GetInstanceAddress()), - "router_address", - types.StringValue(resp.Network.GetRouterAddress()), - ) + return fmt.Errorf("error converting network response value") } - m.Network = net - m.Replicas = types.Int64Value(int64(resp.GetReplicas())) - m.RetentionDays = types.Int64Value(resp.GetRetentionDays()) - m.Status = types.StringValue(string(resp.GetStatus())) stor, diags := sqlserverflexResGen.NewStorageValue( sqlserverflexResGen.StorageValue{}.AttributeTypes(ctx), @@ -74,14 +55,47 @@ func mapResponseToModel( if diags.HasError() { return fmt.Errorf("error converting storage response value") } - m.Storage = stor - m.Version = types.StringValue(string(resp.GetVersion())) + // The interface conversion is safe due to the type constraint. + model := any(m) + + if rm, ok := model.(*resourceModel); ok { + rm.BackupSchedule = types.StringValue(resp.GetBackupSchedule()) + rm.Edition = types.StringValue(string(resp.GetEdition())) + rm.Encryption = handleEncryption(rm.Encryption, resp) + rm.FlavorId = types.StringValue(resp.GetFlavorId()) + rm.Id = types.StringValue(resp.GetId()) + rm.InstanceId = types.StringValue(resp.GetId()) + rm.IsDeletable = types.BoolValue(resp.GetIsDeletable()) + rm.Name = types.StringValue(resp.GetName()) + rm.Network = net + rm.Replicas = types.Int64Value(int64(resp.GetReplicas())) + rm.RetentionDays = types.Int64Value(resp.GetRetentionDays()) + rm.Status = types.StringValue(string(resp.GetStatus())) + rm.Storage = stor + rm.Version = types.StringValue(string(resp.GetVersion())) + } else if dm, ok := model.(*dataSourceModel); ok { + dm.BackupSchedule = types.StringValue(resp.GetBackupSchedule()) + dm.Edition = types.StringValue(string(resp.GetEdition())) + dm.Encryption = handleEncryption(dm.Encryption, resp) + dm.FlavorId = types.StringValue(resp.GetFlavorId()) + dm.Id = types.StringValue(resp.GetId()) + dm.InstanceId = types.StringValue(resp.GetId()) + dm.IsDeletable = types.BoolValue(resp.GetIsDeletable()) + dm.Name = types.StringValue(resp.GetName()) + dm.Network = net + dm.Replicas = types.Int64Value(int64(resp.GetReplicas())) + dm.RetentionDays = types.Int64Value(resp.GetRetentionDays()) + dm.Status = types.StringValue(string(resp.GetStatus())) + dm.Storage = stor + dm.Version = types.StringValue(string(resp.GetVersion())) + } + return nil } func handleEncryption( - m *sqlserverflexResGen.InstanceModel, + encryptionValue sqlserverflexResGen.EncryptionValue, resp *sqlserverflex.GetInstanceResponse, ) sqlserverflexResGen.EncryptionValue { if !resp.HasEncryption() || @@ -91,10 +105,10 @@ func handleEncryption( resp.Encryption.KekKeyVersion == nil || resp.Encryption.ServiceAccount == nil { - if m.Encryption.IsNull() || m.Encryption.IsUnknown() { + if encryptionValue.IsNull() || encryptionValue.IsUnknown() { return sqlserverflexResGen.NewEncryptionValueNull() } - return m.Encryption + return encryptionValue } enc := sqlserverflexResGen.NewEncryptionValueNull() diff --git a/stackit/internal/services/sqlserverflexalpha/instance/resource.go b/stackit/internal/services/sqlserverflexalpha/instance/resource.go index 9257c8df..76c73639 100644 --- a/stackit/internal/services/sqlserverflexalpha/instance/resource.go +++ b/stackit/internal/services/sqlserverflexalpha/instance/resource.go @@ -11,7 +11,6 @@ import ( "time" "github.com/hashicorp/terraform-plugin-framework/resource/identityschema" - postgresflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/utils" sqlserverflexalpha2 "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/instance/resources_gen" sqlserverflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/utils" @@ -37,23 +36,26 @@ var ( _ resource.ResourceWithIdentity = &instanceResource{} ) +// NewInstanceResource is a helper function to simplify the provider implementation. +func NewInstanceResource() resource.Resource { + return &instanceResource{} +} + //nolint:unused // TODO: remove if not needed later var validNodeTypes []string = []string{ "Single", "Replica", } +// resourceModel describes the resource data model. +type resourceModel = sqlserverflexalpha2.InstanceModel + type InstanceResourceIdentityModel struct { ProjectID types.String `tfsdk:"project_id"` Region types.String `tfsdk:"region"` InstanceID types.String `tfsdk:"instance_id"` } -// NewInstanceResource is a helper function to simplify the provider implementation. -func NewInstanceResource() resource.Resource { - return &instanceResource{} -} - // instanceResource is the resource implementation. type instanceResource struct { client *sqlserverflexalpha.APIClient @@ -140,270 +142,28 @@ var modifiersFileByte []byte // Schema defines the schema for the resource. func (r *instanceResource) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) { - //descriptions := map[string]string{ - // "main": "SQLServer Flex ALPHA instance resource schema. Must have a `region` specified in the provider configuration.", - // "id": "Terraform's internal resource ID. It is structured as \"`project_id`,`region`,`instance_id`\".", - // "instance_id": "ID of the SQLServer Flex instance.", - // "project_id": "STACKIT project ID to which the instance is associated.", - // "name": "Instance name.", - // "access_scope": "The access scope of the instance. (SNA | PUBLIC)", - // "flavor_id": "The flavor ID of the instance.", - // "acl": "The Access Control List (ACL) for the SQLServer Flex instance.", - // "backup_schedule": `The backup schedule. Should follow the cron scheduling system format (e.g. "0 0 * * *")`, - // "region": "The resource region. If not defined, the provider region is used.", - // "encryption": "The encryption block.", - // "replicas": "The number of replicas of the SQLServer Flex instance.", - // "network": "The network block.", - // "keyring_id": "STACKIT KMS - KeyRing ID of the encryption key to use.", - // "key_id": "STACKIT KMS - Key ID of the encryption key to use.", - // "key_version": "STACKIT KMS - Key version to use in the encryption key.", - // "service:account": "STACKIT KMS - service account to use in the encryption key.", - // "instance_address": "The returned instance IP address of the SQLServer Flex instance.", - // "router_address": "The returned router IP address of the SQLServer Flex instance.", - //} schema := sqlserverflexalpha2.InstanceResourceSchema(ctx) - fields, err := postgresflexUtils.ReadModifiersConfig(modifiersFileByte) + fields, err := utils.ReadModifiersConfig(modifiersFileByte) if err != nil { resp.Diagnostics.AddError("error during read modifiers config file", err.Error()) return } - err = postgresflexUtils.AddPlanModifiersToResourceSchema(fields, &schema) + err = utils.AddPlanModifiersToResourceSchema(fields, &schema) if err != nil { resp.Diagnostics.AddError("error adding plan modifiers", err.Error()) return } resp.Schema = schema - - //resp.Schema = schema.Schema{ - // Description: descriptions["main"], - // Attributes: map[string]schema.Attribute{ - // "id": schema.StringAttribute{ - // Description: descriptions["id"], - // Computed: true, - // PlanModifiers: []planmodifier.String{ - // stringplanmodifier.UseStateForUnknown(), - // }, - // }, - // "instance_id": schema.StringAttribute{ - // Description: descriptions["instance_id"], - // Computed: true, - // PlanModifiers: []planmodifier.String{ - // stringplanmodifier.UseStateForUnknown(), - // }, - // Validators: []validator.String{ - // validate.UUID(), - // validate.NoSeparator(), - // }, - // }, - // "project_id": schema.StringAttribute{ - // Description: descriptions["project_id"], - // Required: true, - // PlanModifiers: []planmodifier.String{ - // stringplanmodifier.RequiresReplace(), - // }, - // Validators: []validator.String{ - // validate.UUID(), - // validate.NoSeparator(), - // }, - // }, - // "name": schema.StringAttribute{ - // Description: descriptions["name"], - // Required: true, - // Validators: []validator.String{ - // stringvalidator.LengthAtLeast(1), - // stringvalidator.RegexMatches( - // regexp.MustCompile("^[a-z]([-a-z0-9]*[a-z0-9])?$"), - // "must start with a letter, must have lower case letters, numbers or hyphens, and no hyphen at the end", - // ), - // }, - // }, - // "backup_schedule": schema.StringAttribute{ - // Description: descriptions["backup_schedule"], - // Optional: true, - // Computed: true, - // PlanModifiers: []planmodifier.String{ - // stringplanmodifier.UseStateForUnknown(), - // }, - // }, - // "is_deletable": schema.BoolAttribute{ - // Description: descriptions["is_deletable"], - // Optional: true, - // Computed: true, - // PlanModifiers: []planmodifier.Bool{ - // boolplanmodifier.UseStateForUnknown(), - // }, - // }, - // "flavor_id": schema.StringAttribute{ - // PlanModifiers: []planmodifier.String{ - // stringplanmodifier.RequiresReplace(), - // stringplanmodifier.UseStateForUnknown(), - // }, - // Required: true, - // }, - // "replicas": schema.Int64Attribute{ - // Computed: true, - // PlanModifiers: []planmodifier.Int64{ - // int64planmodifier.UseStateForUnknown(), - // }, - // }, - // "storage": schema.SingleNestedAttribute{ - // Optional: true, - // Computed: true, - // PlanModifiers: []planmodifier.Object{ - // objectplanmodifier.UseStateForUnknown(), - // }, - // Attributes: map[string]schema.Attribute{ - // "class": schema.StringAttribute{ - // Optional: true, - // Computed: true, - // PlanModifiers: []planmodifier.String{ - // stringplanmodifier.RequiresReplace(), - // stringplanmodifier.UseStateForUnknown(), - // }, - // }, - // "size": schema.Int64Attribute{ - // Optional: true, - // Computed: true, - // PlanModifiers: []planmodifier.Int64{ - // int64planmodifier.UseStateForUnknown(), - // }, - // }, - // }, - // }, - // "version": schema.StringAttribute{ - // Optional: true, - // Computed: true, - // PlanModifiers: []planmodifier.String{ - // stringplanmodifier.RequiresReplace(), - // stringplanmodifier.UseStateForUnknown(), - // }, - // }, - // "edition": schema.StringAttribute{ - // Computed: true, - // PlanModifiers: []planmodifier.String{ - // stringplanmodifier.RequiresReplace(), - // stringplanmodifier.UseStateForUnknown(), - // }, - // }, - // "retention_days": schema.Int64Attribute{ - // Optional: true, - // Computed: true, - // PlanModifiers: []planmodifier.Int64{ - // int64planmodifier.UseStateForUnknown(), - // }, - // }, - // "region": schema.StringAttribute{ - // Optional: true, - // // must be computed to allow for storing the override value from the provider - // Computed: true, - // Description: descriptions["region"], - // PlanModifiers: []planmodifier.String{ - // stringplanmodifier.RequiresReplace(), - // }, - // }, - // "status": schema.StringAttribute{ - // Optional: true, - // // must be computed to allow for storing the override value from the provider - // Computed: true, - // Description: descriptions["status"], - // }, - // "encryption": schema.SingleNestedAttribute{ - // Optional: true, - // PlanModifiers: []planmodifier.Object{ - // objectplanmodifier.RequiresReplace(), - // objectplanmodifier.UseStateForUnknown(), - // }, - // Attributes: map[string]schema.Attribute{ - // "key_id": schema.StringAttribute{ - // Description: descriptions["key_id"], - // Required: true, - // PlanModifiers: []planmodifier.String{ - // stringplanmodifier.RequiresReplace(), - // }, - // Validators: []validator.String{ - // validate.NoSeparator(), - // }, - // }, - // "key_version": schema.StringAttribute{ - // Description: descriptions["key_version"], - // Required: true, - // PlanModifiers: []planmodifier.String{ - // stringplanmodifier.RequiresReplace(), - // }, - // Validators: []validator.String{ - // validate.NoSeparator(), - // }, - // }, - // "keyring_id": schema.StringAttribute{ - // Description: descriptions["keyring_id"], - // Required: true, - // PlanModifiers: []planmodifier.String{ - // stringplanmodifier.RequiresReplace(), - // }, - // Validators: []validator.String{ - // validate.NoSeparator(), - // }, - // }, - // "service_account": schema.StringAttribute{ - // Description: descriptions["service_account"], - // Required: true, - // PlanModifiers: []planmodifier.String{ - // stringplanmodifier.RequiresReplace(), - // }, - // Validators: []validator.String{ - // validate.NoSeparator(), - // }, - // }, - // }, - // Description: descriptions["encryption"], - // }, - // "network": schema.SingleNestedAttribute{ - // Required: true, - // Attributes: map[string]schema.Attribute{ - // "access_scope": schema.StringAttribute{ - // Description: descriptions["access_scope"], - // Required: true, - // PlanModifiers: []planmodifier.String{ - // stringplanmodifier.RequiresReplace(), - // stringplanmodifier.UseStateForUnknown(), - // }, - // Validators: []validator.String{ - // validate.NoSeparator(), - // }, - // }, - // "acl": schema.ListAttribute{ - // Description: descriptions["acl"], - // ElementType: types.StringType, - // Required: true, - // PlanModifiers: []planmodifier.List{ - // listplanmodifier.UseStateForUnknown(), - // }, - // }, - // "instance_address": schema.StringAttribute{ - // Description: descriptions["instance_address"], - // Computed: true, - // PlanModifiers: []planmodifier.String{ - // stringplanmodifier.UseStateForUnknown(), - // }, - // }, - // "router_address": schema.StringAttribute{ - // Description: descriptions["router_address"], - // Computed: true, - // PlanModifiers: []planmodifier.String{ - // stringplanmodifier.UseStateForUnknown(), - // }, - // }, - // }, - // Description: descriptions["network"], - // }, - // }, - //} } -func (r *instanceResource) IdentitySchema(_ context.Context, _ resource.IdentitySchemaRequest, resp *resource.IdentitySchemaResponse) { +func (r *instanceResource) IdentitySchema( + _ context.Context, + _ resource.IdentitySchemaRequest, + resp *resource.IdentitySchemaResponse, +) { resp.IdentitySchema = identityschema.Schema{ Attributes: map[string]identityschema.Attribute{ "project_id": identityschema.StringAttribute{ @@ -425,7 +185,7 @@ func (r *instanceResource) Create( req resource.CreateRequest, resp *resource.CreateResponse, ) { // nolint:gocritic // function signature required by Terraform - var model sqlserverflexalpha2.InstanceModel + var model resourceModel diags := req.Plan.Get(ctx, &model) resp.Diagnostics.Append(diags...) if resp.Diagnostics.HasError() { @@ -528,7 +288,7 @@ func (r *instanceResource) Create( // Map response body to schema // err = mapFields(ctx, waitResp, &model, storage, encryption, network, region) - err = mapResponseToModel(ctx, waitResp, &model, resp.Diagnostics) + err = mapFields(ctx, waitResp, &model, resp.Diagnostics) if err != nil { core.LogAndAddError( ctx, @@ -554,7 +314,7 @@ func (r *instanceResource) Read( req resource.ReadRequest, resp *resource.ReadResponse, ) { // nolint:gocritic // function signature required by Terraform - var model sqlserverflexalpha2.InstanceModel + var model resourceModel diags := req.State.Get(ctx, &model) resp.Diagnostics.Append(diags...) if resp.Diagnostics.HasError() { @@ -592,7 +352,7 @@ func (r *instanceResource) Read( ctx = core.LogResponse(ctx) // Map response body to schema - err = mapResponseToModel(ctx, instanceResp, &model, resp.Diagnostics) + err = mapFields(ctx, instanceResp, &model, resp.Diagnostics) if err != nil { core.LogAndAddError( ctx, @@ -629,7 +389,7 @@ func (r *instanceResource) Update( resp *resource.UpdateResponse, ) { // nolint:gocritic // function signature required by Terraform // Retrieve values from plan - var model sqlserverflexalpha2.InstanceModel + var model resourceModel diags := req.Plan.Get(ctx, &model) resp.Diagnostics.Append(diags...) if resp.Diagnostics.HasError() { @@ -683,7 +443,7 @@ func (r *instanceResource) Update( } // Map response body to schema - err = mapResponseToModel(ctx, waitResp, &model, resp.Diagnostics) + err = mapFields(ctx, waitResp, &model, resp.Diagnostics) // err = mapFields(ctx, waitResp, &model, storage, encryption, network, region) if err != nil { core.LogAndAddError( @@ -709,7 +469,7 @@ func (r *instanceResource) Delete( resp *resource.DeleteResponse, ) { // nolint:gocritic // function signature required by Terraform // Retrieve values from state - var model sqlserverflexalpha2.InstanceModel + var model resourceModel diags := req.State.Get(ctx, &model) resp.Diagnostics.Append(diags...) if resp.Diagnostics.HasError() { @@ -754,20 +514,41 @@ func (r *instanceResource) ImportState( req resource.ImportStateRequest, resp *resource.ImportStateResponse, ) { - // TODO - idParts := strings.Split(req.ID, core.Separator) + if req.ID != "" { + idParts := strings.Split(req.ID, core.Separator) - if len(idParts) != 3 || idParts[0] == "" || idParts[1] == "" || idParts[2] == "" { - core.LogAndAddError( - ctx, &resp.Diagnostics, - "Error importing instance", - fmt.Sprintf("Expected import identifier with format: [project_id],[region],[instance_id] Got: %q", req.ID), - ) + if len(idParts) != 3 || idParts[0] == "" || idParts[1] == "" || idParts[2] == "" { + core.LogAndAddError( + ctx, &resp.Diagnostics, + "Error importing instance", + fmt.Sprintf( + "Expected import identifier with format: [project_id],[region],[instance_id] Got: %q", + req.ID, + ), + ) + return + } + + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), idParts[0])...) + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), idParts[1])...) + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), idParts[2])...) return } - resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), idParts[0])...) - resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), idParts[1])...) - resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), idParts[2])...) + // If no ID is provided, attempt to read identity attributes from the import configuration + var identityData InstanceResourceIdentityModel + resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...) + if resp.Diagnostics.HasError() { + return + } + + projectId := identityData.ProjectID.ValueString() + region := identityData.Region.ValueString() + instanceId := identityData.InstanceID.ValueString() + + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), projectId)...) + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), region)...) + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), instanceId)...) + tflog.Info(ctx, "SQLServer Flex instance state imported") } diff --git a/stackit/internal/services/sqlserverflexalpha/user/datasource.go b/stackit/internal/services/sqlserverflexalpha/user/datasource.go index 9b083db0..282a713c 100644 --- a/stackit/internal/services/sqlserverflexalpha/user/datasource.go +++ b/stackit/internal/services/sqlserverflexalpha/user/datasource.go @@ -28,7 +28,12 @@ var ( _ datasource.DataSource = &userDataSource{} ) -type DataSourceModel struct { +// NewUserDataSource is a helper function to simplify the provider implementation. +func NewUserDataSource() datasource.DataSource { + return &userDataSource{} +} + +type dataSourceModel struct { Id types.String `tfsdk:"id"` // needed by TF UserId types.Int64 `tfsdk:"user_id"` InstanceId types.String `tfsdk:"instance_id"` @@ -42,11 +47,6 @@ type DataSourceModel struct { DefaultDatabase types.String `tfsdk:"default_database"` } -// NewUserDataSource is a helper function to simplify the provider implementation. -func NewUserDataSource() datasource.DataSource { - return &userDataSource{} -} - // userDataSource is the data source implementation. type userDataSource struct { client *sqlserverflexalpha.APIClient @@ -164,7 +164,7 @@ func (r *userDataSource) Read( req datasource.ReadRequest, resp *datasource.ReadResponse, ) { // nolint:gocritic // function signature required by Terraform - var model DataSourceModel + var model dataSourceModel diags := req.Config.Get(ctx, &model) resp.Diagnostics.Append(diags...) if resp.Diagnostics.HasError() { @@ -226,7 +226,7 @@ func (r *userDataSource) Read( tflog.Info(ctx, "SQLServer Flex instance read") } -func mapDataSourceFields(userResp *sqlserverflexalpha.GetUserResponse, model *DataSourceModel, region string) error { +func mapDataSourceFields(userResp *sqlserverflexalpha.GetUserResponse, model *dataSourceModel, region string) error { if userResp == nil { return fmt.Errorf("response is nil") } diff --git a/stackit/internal/services/sqlserverflexalpha/user/datasource_test.go b/stackit/internal/services/sqlserverflexalpha/user/datasource_test.go index b98c2e53..bd1fa093 100644 --- a/stackit/internal/services/sqlserverflexalpha/user/datasource_test.go +++ b/stackit/internal/services/sqlserverflexalpha/user/datasource_test.go @@ -16,14 +16,14 @@ func TestMapDataSourceFields(t *testing.T) { description string input *sqlserverflexalpha.GetUserResponse region string - expected DataSourceModel + expected dataSourceModel isValid bool }{ { "default_values", &sqlserverflexalpha.GetUserResponse{}, testRegion, - DataSourceModel{ + dataSourceModel{ Id: types.StringValue("pid,region,iid,1"), UserId: types.Int64Value(1), InstanceId: types.StringValue("iid"), @@ -54,7 +54,7 @@ func TestMapDataSourceFields(t *testing.T) { DefaultDatabase: utils.Ptr("default_db"), }, testRegion, - DataSourceModel{ + dataSourceModel{ Id: types.StringValue("pid,region,iid,1"), UserId: types.Int64Value(1), InstanceId: types.StringValue("iid"), @@ -85,7 +85,7 @@ func TestMapDataSourceFields(t *testing.T) { Port: utils.Ptr(int64(2123456789)), }, testRegion, - DataSourceModel{ + dataSourceModel{ Id: types.StringValue("pid,region,iid,1"), UserId: types.Int64Value(1), InstanceId: types.StringValue("iid"), @@ -102,28 +102,28 @@ func TestMapDataSourceFields(t *testing.T) { "nil_response", nil, testRegion, - DataSourceModel{}, + dataSourceModel{}, false, }, { "nil_response_2", &sqlserverflexalpha.GetUserResponse{}, testRegion, - DataSourceModel{}, + dataSourceModel{}, false, }, { "no_resource_id", &sqlserverflexalpha.GetUserResponse{}, testRegion, - DataSourceModel{}, + dataSourceModel{}, false, }, } for _, tt := range tests { t.Run( tt.description, func(t *testing.T) { - state := &DataSourceModel{ + state := &dataSourceModel{ ProjectId: tt.expected.ProjectId, InstanceId: tt.expected.InstanceId, UserId: tt.expected.UserId, diff --git a/stackit/internal/services/sqlserverflexalpha/user/datasources_gen/user_data_source_gen.go b/stackit/internal/services/sqlserverflexalpha/user/datasources_gen/user_data_source_gen.go index 3d252237..329469ea 100644 --- a/stackit/internal/services/sqlserverflexalpha/user/datasources_gen/user_data_source_gen.go +++ b/stackit/internal/services/sqlserverflexalpha/user/datasources_gen/user_data_source_gen.go @@ -98,7 +98,7 @@ func UserDataSourceSchema(ctx context.Context) schema.Schema { "users": schema.ListNestedAttribute{ NestedObject: schema.NestedAttributeObject{ Attributes: map[string]schema.Attribute{ - "id": schema.Int64Attribute{ + "tf_original_api_id": schema.Int64Attribute{ Computed: true, Description: "The ID of the user.", MarkdownDescription: "The ID of the user.", diff --git a/stackit/internal/services/sqlserverflexalpha/user/planModifiers.yaml b/stackit/internal/services/sqlserverflexalpha/user/planModifiers.yaml new file mode 100644 index 00000000..b01aae98 --- /dev/null +++ b/stackit/internal/services/sqlserverflexalpha/user/planModifiers.yaml @@ -0,0 +1,58 @@ +fields: + - name: 'id' + modifiers: + - 'UseStateForUnknown' + + - name: 'user_id' + modifiers: + - 'UseStateForUnknown' + + - name: 'instance_id' + validators: + - validate.NoSeparator + - validate.UUID + modifiers: + - 'RequiresReplace' + + - name: 'project_id' + validators: + - validate.NoSeparator + - validate.UUID + modifiers: + - 'RequiresReplace' + + - name: 'username' + modifiers: + - 'RequiresReplace' + + - name: 'roles' + modifiers: + - 'RequiresReplace' + + - name: 'password' + modifiers: + - 'UseStateForUnknown' + + - name: 'host' + modifiers: + - 'UseStateForUnknown' + + - name: 'port' + modifiers: + - 'UseStateForUnknown' + + - name: 'region' + modifiers: + - 'RequiresReplace' + + - name: 'status' + modifiers: + - 'UseStateForUnknown' + + - name: 'uri' + modifiers: + - 'UseStateForUnknown' + + - name: 'default_database' + modifiers: + - 'UseStateForUnknown' diff --git a/stackit/internal/services/sqlserverflexalpha/user/resource.go b/stackit/internal/services/sqlserverflexalpha/user/resource.go index c5cea986..41e6e1c5 100644 --- a/stackit/internal/services/sqlserverflexalpha/user/resource.go +++ b/stackit/internal/services/sqlserverflexalpha/user/resource.go @@ -2,33 +2,27 @@ package sqlserverflexalpha import ( "context" + _ "embed" "errors" "fmt" "net/http" "strconv" "strings" - "github.com/hashicorp/terraform-plugin-framework/resource/schema/int64planmodifier" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha" + sqlserverflexalphagen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/user/resources_gen" sqlserverflexalphaUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/utils" sqlserverflexalphaWait "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/wait/sqlserverflexalpha" - "github.com/hashicorp/terraform-plugin-framework/schema/validator" - "github.com/hashicorp/terraform-plugin-log/tflog" - "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" - "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" - "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" - "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/validate" - "github.com/hashicorp/terraform-plugin-framework/attr" "github.com/hashicorp/terraform-plugin-framework/path" "github.com/hashicorp/terraform-plugin-framework/resource" - "github.com/hashicorp/terraform-plugin-framework/resource/schema" - "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier" - "github.com/hashicorp/terraform-plugin-framework/resource/schema/setplanmodifier" - "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier" "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/hashicorp/terraform-plugin-log/tflog" "github.com/stackitcloud/stackit-sdk-go/core/oapierror" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" ) // Ensure the implementation satisfies the expected interfaces. @@ -39,26 +33,22 @@ var ( _ resource.ResourceWithModifyPlan = &userResource{} ) -type Model struct { - Id types.String `tfsdk:"id"` // needed by TF - UserId types.Int64 `tfsdk:"user_id"` - InstanceId types.String `tfsdk:"instance_id"` - ProjectId types.String `tfsdk:"project_id"` - Username types.String `tfsdk:"username"` - Roles types.Set `tfsdk:"roles"` - Password types.String `tfsdk:"password"` - Host types.String `tfsdk:"host"` - Port types.Int64 `tfsdk:"port"` - Region types.String `tfsdk:"region"` - Status types.String `tfsdk:"status"` - DefaultDatabase types.String `tfsdk:"default_database"` -} - // NewUserResource is a helper function to simplify the provider implementation. func NewUserResource() resource.Resource { return &userResource{} } +// resourceModel describes the resource data model. +type resourceModel = sqlserverflexalphagen.UserModel + +// UserResourceIdentityModel describes the resource's identity attributes. +type UserResourceIdentityModel struct { + ProjectID types.String `tfsdk:"project_id"` + Region types.String `tfsdk:"region"` + InstanceID types.String `tfsdk:"instance_id"` + UserID types.Int64 `tfsdk:"user_id"` +} + // userResource is the resource implementation. type userResource struct { client *sqlserverflexalpha.APIClient @@ -93,7 +83,7 @@ func (r *userResource) ModifyPlan( req resource.ModifyPlanRequest, resp *resource.ModifyPlanResponse, ) { // nolint:gocritic // function signature required by Terraform - var configModel Model + var configModel resourceModel // skip initial empty configuration to avoid follow-up errors if req.Config.Raw.IsNull() { return @@ -103,7 +93,7 @@ func (r *userResource) ModifyPlan( return } - var planModel Model + var planModel resourceModel resp.Diagnostics.Append(req.Plan.Get(ctx, &planModel)...) if resp.Diagnostics.HasError() { return @@ -120,107 +110,25 @@ func (r *userResource) ModifyPlan( } } +//go:embed planModifiers.yaml +var modifiersFileByte []byte + // Schema defines the schema for the resource. -func (r *userResource) Schema(_ context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) { - descriptions := map[string]string{ - "main": "SQLServer Flex user resource schema. Must have a `region` specified in the provider configuration.", - "id": "Terraform's internal resource ID. It is structured as \"`project_id`,`region`,`instance_id`,`user_id`\".", - "user_id": "User ID.", - "instance_id": "ID of the SQLServer Flex instance.", - "project_id": "STACKIT project ID to which the instance is associated.", - "username": "Username of the SQLServer Flex instance.", - "roles": "Database access levels for the user. The values for the default roles are: `##STACKIT_DatabaseManager##`, `##STACKIT_LoginManager##`, `##STACKIT_ProcessManager##`, `##STACKIT_ServerManager##`, `##STACKIT_SQLAgentManager##`, `##STACKIT_SQLAgentUser##`", - "password": "Password of the user account.", - "status": "Status of the user.", - "default_database": "Default database of the user.", +func (r *userResource) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) { + s := sqlserverflexalphagen.UserResourceSchema(ctx) + + fields, err := utils.ReadModifiersConfig(modifiersFileByte) + if err != nil { + resp.Diagnostics.AddError("error during read modifiers config file", err.Error()) + return } - resp.Schema = schema.Schema{ - Description: descriptions["main"], - Attributes: map[string]schema.Attribute{ - "id": schema.StringAttribute{ - Description: descriptions["id"], - Computed: true, - PlanModifiers: []planmodifier.String{ - stringplanmodifier.UseStateForUnknown(), - }, - }, - "user_id": schema.Int64Attribute{ - Description: descriptions["user_id"], - Computed: true, - PlanModifiers: []planmodifier.Int64{ - int64planmodifier.UseStateForUnknown(), - }, - Validators: []validator.Int64{}, - }, - "instance_id": schema.StringAttribute{ - Description: descriptions["instance_id"], - Required: true, - PlanModifiers: []planmodifier.String{ - stringplanmodifier.RequiresReplace(), - stringplanmodifier.UseStateForUnknown(), - }, - Validators: []validator.String{ - validate.UUID(), - validate.NoSeparator(), - }, - }, - "project_id": schema.StringAttribute{ - Description: descriptions["project_id"], - Required: true, - PlanModifiers: []planmodifier.String{ - stringplanmodifier.RequiresReplace(), - stringplanmodifier.UseStateForUnknown(), - }, - Validators: []validator.String{ - validate.UUID(), - validate.NoSeparator(), - }, - }, - "username": schema.StringAttribute{ - Description: descriptions["username"], - Required: true, - PlanModifiers: []planmodifier.String{ - stringplanmodifier.RequiresReplace(), - stringplanmodifier.UseStateForUnknown(), - }, - }, - "roles": schema.SetAttribute{ - Description: descriptions["roles"], - ElementType: types.StringType, - Required: true, - PlanModifiers: []planmodifier.Set{ - setplanmodifier.RequiresReplace(), - }, - }, - "password": schema.StringAttribute{ - Description: descriptions["password"], - Computed: true, - Sensitive: true, - }, - "host": schema.StringAttribute{ - Computed: true, - }, - "port": schema.Int64Attribute{ - Computed: true, - }, - "region": schema.StringAttribute{ - Optional: true, - // must be computed to allow for storing the override value from the provider - Computed: true, - Description: descriptions["region"], - PlanModifiers: []planmodifier.String{ - stringplanmodifier.RequiresReplace(), - }, - }, - "status": schema.StringAttribute{ - Computed: true, - }, - "default_database": schema.StringAttribute{ - Computed: true, - }, - }, + err = utils.AddPlanModifiersToResourceSchema(fields, &s) + if err != nil { + resp.Diagnostics.AddError("error adding plan modifiers", err.Error()) + return } + resp.Schema = s } // Create creates the resource and sets the initial Terraform state. @@ -229,7 +137,7 @@ func (r *userResource) Create( req resource.CreateRequest, resp *resource.CreateResponse, ) { // nolint:gocritic // function signature required by Terraform - var model Model + var model resourceModel diags := req.Plan.Get(ctx, &model) resp.Diagnostics.Append(diags...) if resp.Diagnostics.HasError() { @@ -313,7 +221,7 @@ func (r *userResource) Read( req resource.ReadRequest, resp *resource.ReadResponse, ) { // nolint:gocritic // function signature required by Terraform - var model Model + var model resourceModel diags := req.State.Get(ctx, &model) resp.Diagnostics.Append(diags...) if resp.Diagnostics.HasError() { @@ -387,7 +295,7 @@ func (r *userResource) Delete( resp *resource.DeleteResponse, ) { // nolint:gocritic // function signature required by Terraform // Retrieve values from plan - var model Model + var model resourceModel diags := req.State.Get(ctx, &model) resp.Diagnostics.Append(diags...) if resp.Diagnostics.HasError() { @@ -429,23 +337,63 @@ func (r *userResource) ImportState( req resource.ImportStateRequest, resp *resource.ImportStateResponse, ) { - idParts := strings.Split(req.ID, core.Separator) - if len(idParts) != 4 || idParts[0] == "" || idParts[1] == "" || idParts[2] == "" || idParts[3] == "" { - core.LogAndAddError( - ctx, &resp.Diagnostics, - "Error importing user", - fmt.Sprintf( - "Expected import identifier with format [project_id],[region],[instance_id],[user_id], got %q", - req.ID, - ), - ) + + ctx = core.InitProviderContext(ctx) + + if req.ID != "" { + + idParts := strings.Split(req.ID, core.Separator) + + if len(idParts) != 4 || idParts[0] == "" || idParts[1] == "" || idParts[2] == "" || idParts[3] == "" { + core.LogAndAddError( + ctx, &resp.Diagnostics, + "Error importing user", + fmt.Sprintf( + "Expected import identifier with format [project_id],[region],[instance_id],[user_id], got %q", + req.ID, + ), + ) + return + } + + userId, err := strconv.ParseInt(idParts[3], 10, 64) + if err != nil { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + "Error importing user", + fmt.Sprintf("Invalid user_id format: %q. It must be a valid integer.", idParts[3]), + ) + return + } + + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), idParts[0])...) + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), idParts[1])...) + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), idParts[2])...) + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("user_id"), userId)...) + + tflog.Info(ctx, "Postgres Flex user state imported") + return } - resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), idParts[0])...) - resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), idParts[1])...) - resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), idParts[2])...) - resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("user_id"), idParts[3])...) + // If no ID is provided, attempt to read identity attributes from the import configuration + var identityData UserResourceIdentityModel + resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...) + if resp.Diagnostics.HasError() { + return + } + + projectId := identityData.ProjectID.ValueString() + region := identityData.Region.ValueString() + instanceId := identityData.InstanceID.ValueString() + userId := identityData.UserID.ValueInt64() + + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), projectId)...) + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), region)...) + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), instanceId)...) + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("user_id"), userId)...) + core.LogAndAddWarning( ctx, &resp.Diagnostics, @@ -455,7 +403,7 @@ func (r *userResource) ImportState( tflog.Info(ctx, "SQLServer Flex user state imported") } -func mapFieldsCreate(userResp *sqlserverflexalpha.CreateUserResponse, model *Model, region string) error { +func mapFieldsCreate(userResp *sqlserverflexalpha.CreateUserResponse, model *resourceModel, region string) error { if userResp == nil { return fmt.Errorf("response is nil") } @@ -468,12 +416,6 @@ func mapFieldsCreate(userResp *sqlserverflexalpha.CreateUserResponse, model *Mod return fmt.Errorf("user id not present") } userId := *user.Id - model.Id = utils.BuildInternalTerraformId( - model.ProjectId.ValueString(), - region, - model.InstanceId.ValueString(), - strconv.FormatInt(userId, 10), - ) model.UserId = types.Int64Value(userId) model.Username = types.StringPointerValue(user.Username) @@ -491,11 +433,11 @@ func mapFieldsCreate(userResp *sqlserverflexalpha.CreateUserResponse, model *Mod if diags.HasError() { return fmt.Errorf("failed to map roles: %w", core.DiagsToError(diags)) } - model.Roles = rolesSet + model.Roles = types.List(rolesSet) } if model.Roles.IsNull() || model.Roles.IsUnknown() { - model.Roles = types.SetNull(types.StringType) + model.Roles = types.List(types.SetNull(types.StringType)) } model.Host = types.StringPointerValue(user.Host) @@ -507,7 +449,7 @@ func mapFieldsCreate(userResp *sqlserverflexalpha.CreateUserResponse, model *Mod return nil } -func mapFields(userResp *sqlserverflexalpha.GetUserResponse, model *Model, region string) error { +func mapFields(userResp *sqlserverflexalpha.GetUserResponse, model *resourceModel, region string) error { if userResp == nil { return fmt.Errorf("response is nil") } @@ -524,12 +466,7 @@ func mapFields(userResp *sqlserverflexalpha.GetUserResponse, model *Model, regio } else { return fmt.Errorf("user id not present") } - model.Id = utils.BuildInternalTerraformId( - model.ProjectId.ValueString(), - region, - model.InstanceId.ValueString(), - strconv.FormatInt(userId, 10), - ) + model.UserId = types.Int64Value(userId) model.Username = types.StringPointerValue(user.Username) @@ -542,11 +479,11 @@ func mapFields(userResp *sqlserverflexalpha.GetUserResponse, model *Model, regio if diags.HasError() { return fmt.Errorf("failed to map roles: %w", core.DiagsToError(diags)) } - model.Roles = rolesSet + model.Roles = types.List(rolesSet) } if model.Roles.IsNull() || model.Roles.IsUnknown() { - model.Roles = types.SetNull(types.StringType) + model.Roles = types.List(types.SetNull(types.StringType)) } model.Host = types.StringPointerValue(user.Host) @@ -556,7 +493,7 @@ func mapFields(userResp *sqlserverflexalpha.GetUserResponse, model *Model, regio } func toCreatePayload( - model *Model, + model *resourceModel, roles []sqlserverflexalpha.UserRole, ) (*sqlserverflexalpha.CreateUserRequestPayload, error) { if model == nil { diff --git a/stackit/internal/services/sqlserverflexalpha/user/resource_test.go b/stackit/internal/services/sqlserverflexalpha/user/resource_test.go index ad6bbf5a..e7ddddb1 100644 --- a/stackit/internal/services/sqlserverflexalpha/user/resource_test.go +++ b/stackit/internal/services/sqlserverflexalpha/user/resource_test.go @@ -16,7 +16,7 @@ func TestMapFieldsCreate(t *testing.T) { description string input *sqlserverflexalpha.CreateUserResponse region string - expected Model + expected resourceModel isValid bool }{ { @@ -26,13 +26,13 @@ func TestMapFieldsCreate(t *testing.T) { Password: utils.Ptr(""), }, testRegion, - Model{ - Id: types.StringValue("pid,region,iid,1"), + resourceModel{ + Id: types.Int64Value(1), UserId: types.Int64Value(1), InstanceId: types.StringValue("iid"), ProjectId: types.StringValue("pid"), Username: types.StringNull(), - Roles: types.SetNull(types.StringType), + Roles: types.List(types.SetNull(types.StringType)), Password: types.StringValue(""), Host: types.StringNull(), Port: types.Int64Null(), @@ -57,18 +57,20 @@ func TestMapFieldsCreate(t *testing.T) { DefaultDatabase: utils.Ptr("default_db"), }, testRegion, - Model{ - Id: types.StringValue("pid,region,iid,2"), + resourceModel{ + Id: types.Int64Value(2), UserId: types.Int64Value(2), InstanceId: types.StringValue("iid"), ProjectId: types.StringValue("pid"), Username: types.StringValue("username"), - Roles: types.SetValueMust( - types.StringType, []attr.Value{ - types.StringValue("role_1"), - types.StringValue("role_2"), - types.StringValue(""), - }, + Roles: types.List( + types.SetValueMust( + types.StringType, []attr.Value{ + types.StringValue("role_1"), + types.StringValue("role_2"), + types.StringValue(""), + }, + ), ), Password: types.StringValue("password"), Host: types.StringValue("host"), @@ -90,13 +92,13 @@ func TestMapFieldsCreate(t *testing.T) { Port: utils.Ptr(int64(2123456789)), }, testRegion, - Model{ - Id: types.StringValue("pid,region,iid,3"), + resourceModel{ + Id: types.Int64Value(3), UserId: types.Int64Value(3), InstanceId: types.StringValue("iid"), ProjectId: types.StringValue("pid"), Username: types.StringNull(), - Roles: types.SetValueMust(types.StringType, []attr.Value{}), + Roles: types.List(types.SetValueMust(types.StringType, []attr.Value{})), Password: types.StringValue(""), Host: types.StringNull(), Port: types.Int64Value(2123456789), @@ -110,21 +112,21 @@ func TestMapFieldsCreate(t *testing.T) { "nil_response", nil, testRegion, - Model{}, + resourceModel{}, false, }, { "nil_response_2", &sqlserverflexalpha.CreateUserResponse{}, testRegion, - Model{}, + resourceModel{}, false, }, { "no_resource_id", &sqlserverflexalpha.CreateUserResponse{}, testRegion, - Model{}, + resourceModel{}, false, }, { @@ -133,14 +135,14 @@ func TestMapFieldsCreate(t *testing.T) { Id: utils.Ptr(int64(1)), }, testRegion, - Model{}, + resourceModel{}, false, }, } for _, tt := range tests { t.Run( tt.description, func(t *testing.T) { - state := &Model{ + state := &resourceModel{ ProjectId: tt.expected.ProjectId, InstanceId: tt.expected.InstanceId, } @@ -168,20 +170,20 @@ func TestMapFields(t *testing.T) { description string input *sqlserverflexalpha.GetUserResponse region string - expected Model + expected resourceModel isValid bool }{ { "default_values", &sqlserverflexalpha.GetUserResponse{}, testRegion, - Model{ - Id: types.StringValue("pid,region,iid,1"), + resourceModel{ + Id: types.Int64Value(1), UserId: types.Int64Value(1), InstanceId: types.StringValue("iid"), ProjectId: types.StringValue("pid"), Username: types.StringNull(), - Roles: types.SetNull(types.StringType), + Roles: types.List(types.SetNull(types.StringType)), Host: types.StringNull(), Port: types.Int64Null(), Region: types.StringValue(testRegion), @@ -201,18 +203,20 @@ func TestMapFields(t *testing.T) { Port: utils.Ptr(int64(1234)), }, testRegion, - Model{ - Id: types.StringValue("pid,region,iid,2"), + resourceModel{ + Id: types.Int64Value(2), UserId: types.Int64Value(2), InstanceId: types.StringValue("iid"), ProjectId: types.StringValue("pid"), Username: types.StringValue("username"), - Roles: types.SetValueMust( - types.StringType, []attr.Value{ - types.StringValue("role_1"), - types.StringValue("role_2"), - types.StringValue(""), - }, + Roles: types.List( + types.SetValueMust( + types.StringType, []attr.Value{ + types.StringValue("role_1"), + types.StringValue("role_2"), + types.StringValue(""), + }, + ), ), Host: types.StringValue("host"), Port: types.Int64Value(1234), @@ -230,13 +234,13 @@ func TestMapFields(t *testing.T) { Port: utils.Ptr(int64(2123456789)), }, testRegion, - Model{ - Id: types.StringValue("pid,region,iid,1"), + resourceModel{ + Id: types.Int64Value(1), UserId: types.Int64Value(1), InstanceId: types.StringValue("iid"), ProjectId: types.StringValue("pid"), Username: types.StringNull(), - Roles: types.SetValueMust(types.StringType, []attr.Value{}), + Roles: types.List(types.SetValueMust(types.StringType, []attr.Value{})), Host: types.StringNull(), Port: types.Int64Value(2123456789), Region: types.StringValue(testRegion), @@ -247,28 +251,28 @@ func TestMapFields(t *testing.T) { "nil_response", nil, testRegion, - Model{}, + resourceModel{}, false, }, { "nil_response_2", &sqlserverflexalpha.GetUserResponse{}, testRegion, - Model{}, + resourceModel{}, false, }, { "no_resource_id", &sqlserverflexalpha.GetUserResponse{}, testRegion, - Model{}, + resourceModel{}, false, }, } for _, tt := range tests { t.Run( tt.description, func(t *testing.T) { - state := &Model{ + state := &resourceModel{ ProjectId: tt.expected.ProjectId, InstanceId: tt.expected.InstanceId, UserId: tt.expected.UserId, @@ -294,14 +298,14 @@ func TestMapFields(t *testing.T) { func TestToCreatePayload(t *testing.T) { tests := []struct { description string - input *Model + input *resourceModel inputRoles []sqlserverflexalpha.UserRole expected *sqlserverflexalpha.CreateUserRequestPayload isValid bool }{ { "default_values", - &Model{}, + &resourceModel{}, []sqlserverflexalpha.UserRole{}, &sqlserverflexalpha.CreateUserRequestPayload{ Roles: &[]sqlserverflexalpha.UserRole{}, @@ -311,7 +315,7 @@ func TestToCreatePayload(t *testing.T) { }, { "default_values", - &Model{ + &resourceModel{ Username: types.StringValue("username"), }, []sqlserverflexalpha.UserRole{ @@ -329,7 +333,7 @@ func TestToCreatePayload(t *testing.T) { }, { "null_fields_and_int_conversions", - &Model{ + &resourceModel{ Username: types.StringNull(), }, []sqlserverflexalpha.UserRole{ @@ -352,7 +356,7 @@ func TestToCreatePayload(t *testing.T) { }, { "nil_roles", - &Model{ + &resourceModel{ Username: types.StringValue("username"), }, []sqlserverflexalpha.UserRole{}, diff --git a/stackit/internal/services/sqlserverflexbeta/database/datasource.go b/stackit/internal/services/sqlserverflexbeta/database/datasource.go index bb6c3038..063fe6d9 100644 --- a/stackit/internal/services/sqlserverflexbeta/database/datasource.go +++ b/stackit/internal/services/sqlserverflexbeta/database/datasource.go @@ -26,17 +26,21 @@ func NewDatabaseDataSource() datasource.DataSource { return &databaseDataSource{} } +type dataSourceModel struct { + sqlserverflexbetaGen.DatabaseModel + TfId types.String `tfsdk:"id"` +} + type databaseDataSource struct { client *sqlserverflexbetaPkg.APIClient providerData core.ProviderData } -type dsModel struct { - sqlserverflexbetaGen.DatabaseModel - TfId types.String `tfsdk:"id"` -} - -func (d *databaseDataSource) Metadata(_ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) { +func (d *databaseDataSource) Metadata( + _ context.Context, + req datasource.MetadataRequest, + resp *datasource.MetadataResponse, +) { resp.TypeName = req.ProviderTypeName + "_sqlserverflexbeta_database" } @@ -92,7 +96,7 @@ func (d *databaseDataSource) Configure( } func (d *databaseDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { - var data dsModel + var data dataSourceModel readErr := "Read DB error" // Read Terraform configuration data into the model diff --git a/stackit/internal/services/sqlserverflexbeta/database/datasources_gen/database_data_source_fix.go b/stackit/internal/services/sqlserverflexbeta/database/datasources_gen/database_data_source_gen.go similarity index 100% rename from stackit/internal/services/sqlserverflexbeta/database/datasources_gen/database_data_source_fix.go rename to stackit/internal/services/sqlserverflexbeta/database/datasources_gen/database_data_source_gen.go diff --git a/stackit/internal/services/sqlserverflexbeta/database/planModifiers.yaml b/stackit/internal/services/sqlserverflexbeta/database/planModifiers.yaml new file mode 100644 index 00000000..d6209230 --- /dev/null +++ b/stackit/internal/services/sqlserverflexbeta/database/planModifiers.yaml @@ -0,0 +1,50 @@ +fields: + - name: 'id' + modifiers: + - 'UseStateForUnknown' + + - name: 'instance_id' + validators: + - validate.NoSeparator + - validate.UUID + modifiers: + - 'RequiresReplace' + + - name: 'project_id' + validators: + - validate.NoSeparator + - validate.UUID + modifiers: + - 'RequiresReplace' + + - name: 'region' + modifiers: + - 'RequiresReplace' + + - name: 'name' + modifiers: + - 'RequiresReplace' + + - name: 'collation' + modifiers: + - 'RequiresReplace' + + - name: 'owner' + modifiers: + - 'RequiresReplace' + + - name: 'database_name' + modifiers: + - 'UseStateForUnknown' + + - name: 'collation_name' + modifiers: + - 'UseStateForUnknown' + + - name: 'compatibility' + modifiers: + - 'RequiresReplace' + + - name: 'compatibility_level' + modifiers: + - 'UseStateForUnknown' diff --git a/stackit/internal/services/sqlserverflexbeta/database/resource.go b/stackit/internal/services/sqlserverflexbeta/database/resource.go index 5ae1d6c4..9c052dbb 100644 --- a/stackit/internal/services/sqlserverflexbeta/database/resource.go +++ b/stackit/internal/services/sqlserverflexbeta/database/resource.go @@ -2,6 +2,7 @@ package sqlserverflexbeta import ( "context" + _ "embed" "fmt" "strings" "time" @@ -35,6 +36,9 @@ func NewDatabaseResource() resource.Resource { return &databaseResource{} } +// resourceModel describes the resource data model. +type resourceModel = sqlserverflexbetaResGen.DatabaseModel + type databaseResource struct { client *sqlserverflexbeta.APIClient providerData core.ProviderData @@ -47,15 +51,40 @@ type DatabaseResourceIdentityModel struct { DatabaseName types.String `tfsdk:"database_name"` } -func (r *databaseResource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) { +func (r *databaseResource) Metadata( + ctx context.Context, + req resource.MetadataRequest, + resp *resource.MetadataResponse, +) { resp.TypeName = req.ProviderTypeName + "_sqlserverflexbeta_database" } +//go:embed planModifiers.yaml +var modifiersFileByte []byte + func (r *databaseResource) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) { - resp.Schema = sqlserverflexbetaResGen.DatabaseResourceSchema(ctx) + + s := sqlserverflexbetaResGen.DatabaseResourceSchema(ctx) + + fields, err := utils.ReadModifiersConfig(modifiersFileByte) + if err != nil { + resp.Diagnostics.AddError("error during read modifiers config file", err.Error()) + return + } + + err = utils.AddPlanModifiersToResourceSchema(fields, &s) + if err != nil { + resp.Diagnostics.AddError("error adding plan modifiers", err.Error()) + return + } + resp.Schema = s } -func (r *databaseResource) IdentitySchema(_ context.Context, _ resource.IdentitySchemaRequest, resp *resource.IdentitySchemaResponse) { +func (r *databaseResource) IdentitySchema( + _ context.Context, + _ resource.IdentitySchemaRequest, + resp *resource.IdentitySchemaResponse, +) { resp.IdentitySchema = identityschema.Schema{ Attributes: map[string]identityschema.Attribute{ "project_id": identityschema.StringAttribute{ @@ -91,7 +120,10 @@ func (r *databaseResource) Configure( utils.UserAgentConfigOption(r.providerData.Version), } if r.providerData.SQLServerFlexCustomEndpoint != "" { - apiClientConfigOptions = append(apiClientConfigOptions, config.WithEndpoint(r.providerData.SQLServerFlexCustomEndpoint)) + apiClientConfigOptions = append( + apiClientConfigOptions, + config.WithEndpoint(r.providerData.SQLServerFlexCustomEndpoint), + ) } else { apiClientConfigOptions = append(apiClientConfigOptions, config.WithRegion(r.providerData.GetRegion())) } @@ -111,7 +143,7 @@ func (r *databaseResource) Configure( } func (r *databaseResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) { - var data sqlserverflexbetaResGen.DatabaseModel + var data resourceModel createErr := "DB create error" // Read Terraform plan data into the model @@ -243,7 +275,7 @@ func (r *databaseResource) Create(ctx context.Context, req resource.CreateReques } func (r *databaseResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) { - var data sqlserverflexbetaResGen.DatabaseModel + var data resourceModel readErr := "[Database Read]" // Read Terraform prior state data into the model @@ -298,7 +330,7 @@ func (r *databaseResource) Read(ctx context.Context, req resource.ReadRequest, r } func (r *databaseResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) { - var data sqlserverflexbetaResGen.DatabaseModel + var data resourceModel // Read Terraform prior state data into the model resp.Diagnostics.Append(req.State.Get(ctx, &data)...) @@ -329,7 +361,7 @@ func (r *databaseResource) Update(ctx context.Context, req resource.UpdateReques } func (r *databaseResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) { - var data sqlserverflexbetaResGen.DatabaseModel + var data resourceModel // Read Terraform prior state data into the model resp.Diagnostics.Append(req.State.Get(ctx, &data)...) @@ -422,9 +454,13 @@ func (r *databaseResource) ImportState( idParts := strings.Split(req.ID, core.Separator) if len(idParts) != 4 || idParts[0] == "" || idParts[1] == "" || idParts[2] == "" || idParts[3] == "" { - core.LogAndAddError(ctx, &resp.Diagnostics, + core.LogAndAddError( + ctx, &resp.Diagnostics, "Error importing database", - fmt.Sprintf("Expected import identifier with format: [project_id],[region],[instance_id],[database_name] Got: %q", req.ID), + fmt.Sprintf( + "Expected import identifier with format: [project_id],[region],[instance_id],[database_name] Got: %q", + req.ID, + ), ) return } @@ -449,21 +485,22 @@ func (r *databaseResource) ImportState( return } + // If no ID is provided, attempt to read identity attributes from the import configuration var identityData DatabaseResourceIdentityModel resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...) if resp.Diagnostics.HasError() { return } - resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), identityData.ProjectID.ValueString())...) - resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), identityData.Region.ValueString())...) - resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), identityData.InstanceID.ValueString())...) - resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("database_name"), identityData.DatabaseName.ValueString())...) + projectId := identityData.ProjectID.ValueString() + region := identityData.Region.ValueString() + instanceId := identityData.InstanceID.ValueString() + databaseName := identityData.DatabaseName.ValueString() - resp.Diagnostics.Append(resp.Identity.Set(ctx, &identityData)...) - if resp.Diagnostics.HasError() { - return - } + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), projectId)...) + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), region)...) + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), instanceId)...) + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("database_name"), databaseName)...) tflog.Info(ctx, "Sqlserverflexbeta database state imported") } diff --git a/stackit/internal/services/sqlserverflexbeta/flavors/datasource.go b/stackit/internal/services/sqlserverflexbeta/flavors/datasource.go new file mode 100644 index 00000000..b401e4ff --- /dev/null +++ b/stackit/internal/services/sqlserverflexbeta/flavors/datasource.go @@ -0,0 +1,155 @@ +package sqlserverflexbeta + +import ( + "context" + "fmt" + "net/http" + + "github.com/hashicorp/terraform-plugin-framework/datasource" + "github.com/hashicorp/terraform-plugin-framework/datasource/schema" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/hashicorp/terraform-plugin-log/tflog" + "github.com/stackitcloud/stackit-sdk-go/core/config" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" + + sqlserverflexbetaPkg "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta" + + sqlserverflexbetaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/flavors/datasources_gen" +) + +var _ datasource.DataSource = (*flavorsDataSource)(nil) + +const errorPrefix = "[Sqlserverflexbeta - Flavors]" + +func NewFlavorsDataSource() datasource.DataSource { + return &flavorsDataSource{} +} + +type dataSourceModel struct { + sqlserverflexbetaGen.FlavorsModel + TerraformId types.String `tfsdk:"id"` +} + +type flavorsDataSource struct { + client *sqlserverflexbetaPkg.APIClient + providerData core.ProviderData +} + +func (d *flavorsDataSource) Metadata( + _ context.Context, + req datasource.MetadataRequest, + resp *datasource.MetadataResponse, +) { + resp.TypeName = req.ProviderTypeName + "_sqlserverflexbeta_flavors" +} + +func (d *flavorsDataSource) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) { + resp.Schema = sqlserverflexbetaGen.FlavorsDataSourceSchema(ctx) + resp.Schema.Attributes["id"] = schema.StringAttribute{ + Computed: true, + Description: "The terraform internal identifier.", + MarkdownDescription: "The terraform internal identifier.", + } +} + +// Configure adds the provider configured client to the data source. +func (d *flavorsDataSource) Configure( + ctx context.Context, + req datasource.ConfigureRequest, + resp *datasource.ConfigureResponse, +) { + var ok bool + d.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics) + if !ok { + return + } + + apiClientConfigOptions := []config.ConfigurationOption{ + config.WithCustomAuth(d.providerData.RoundTripper), + utils.UserAgentConfigOption(d.providerData.Version), + } + if d.providerData.SQLServerFlexCustomEndpoint != "" { + apiClientConfigOptions = append( + apiClientConfigOptions, + config.WithEndpoint(d.providerData.SQLServerFlexCustomEndpoint), + ) + } else { + apiClientConfigOptions = append( + apiClientConfigOptions, + config.WithRegion(d.providerData.GetRegion()), + ) + } + apiClient, err := sqlserverflexbetaPkg.NewAPIClient(apiClientConfigOptions...) + if err != nil { + resp.Diagnostics.AddError( + "Error configuring API client", + fmt.Sprintf( + "Configuring client: %v. This is an error related to the provider configuration, not to the resource configuration", + err, + ), + ) + return + } + d.client = apiClient + tflog.Info(ctx, fmt.Sprintf("%s client configured", errorPrefix)) +} + +func (d *flavorsDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { + var data dataSourceModel + + // Read Terraform configuration data into the model + resp.Diagnostics.Append(req.Config.Get(ctx, &data)...) + + if resp.Diagnostics.HasError() { + return + } + + ctx = core.InitProviderContext(ctx) + + projectId := data.ProjectId.ValueString() + region := d.providerData.GetRegionWithOverride(data.Region) + // TODO: implement right identifier for flavors + flavorsId := data.FlavorsModel.Flavors + + ctx = tflog.SetField(ctx, "project_id", projectId) + ctx = tflog.SetField(ctx, "region", region) + + // TODO: implement needed fields + ctx = tflog.SetField(ctx, "flavors_id", flavorsId) + + // TODO: refactor to correct implementation + _, err := d.client.GetFlavorsRequest(ctx, projectId, region).Execute() + if err != nil { + utils.LogError( + ctx, + &resp.Diagnostics, + err, + "Reading flavors", + fmt.Sprintf("flavors with ID %q does not exist in project %q.", flavorsId, projectId), + map[int]string{ + http.StatusForbidden: fmt.Sprintf("Project with ID %q not found or forbidden access", projectId), + }, + ) + resp.State.RemoveResource(ctx) + return + } + + ctx = core.LogResponse(ctx) + + // TODO: refactor to correct implementation of internal tf id + data.TerraformId = utils.BuildInternalTerraformId(projectId, region) + + // TODO: fill remaining fields + // data.Flavors = types.Sometype(apiResponse.GetFlavors()) + // data.Page = types.Sometype(apiResponse.GetPage()) + // data.Pagination = types.Sometype(apiResponse.GetPagination()) + // data.ProjectId = types.Sometype(apiResponse.GetProjectId()) + // data.Region = types.Sometype(apiResponse.GetRegion()) + // data.Size = types.Sometype(apiResponse.GetSize()) + // data.Sort = types.Sometype(apiResponse.GetSort())// Save data into Terraform state + resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) + + tflog.Info(ctx, fmt.Sprintf("%s read successful", errorPrefix)) +} diff --git a/stackit/internal/services/sqlserverflexbeta/flavors/datasources_gen/flavors_data_source_gen.go b/stackit/internal/services/sqlserverflexbeta/flavors/datasources_gen/flavors_data_source_gen.go new file mode 100644 index 00000000..a9d35ba1 --- /dev/null +++ b/stackit/internal/services/sqlserverflexbeta/flavors/datasources_gen/flavors_data_source_gen.go @@ -0,0 +1,1909 @@ +// Code generated by terraform-plugin-framework-generator DO NOT EDIT. + +package sqlserverflexbeta + +import ( + "context" + "fmt" + "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator" + "github.com/hashicorp/terraform-plugin-framework/attr" + "github.com/hashicorp/terraform-plugin-framework/diag" + "github.com/hashicorp/terraform-plugin-framework/schema/validator" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/hashicorp/terraform-plugin-framework/types/basetypes" + "github.com/hashicorp/terraform-plugin-go/tftypes" + "strings" + + "github.com/hashicorp/terraform-plugin-framework/datasource/schema" +) + +func FlavorsDataSourceSchema(ctx context.Context) schema.Schema { + return schema.Schema{ + Attributes: map[string]schema.Attribute{ + "flavors": schema.ListNestedAttribute{ + NestedObject: schema.NestedAttributeObject{ + Attributes: map[string]schema.Attribute{ + "cpu": schema.Int64Attribute{ + Computed: true, + Description: "The cpu count of the instance.", + MarkdownDescription: "The cpu count of the instance.", + }, + "description": schema.StringAttribute{ + Computed: true, + Description: "The flavor description.", + MarkdownDescription: "The flavor description.", + }, + "tf_original_api_id": schema.StringAttribute{ + Computed: true, + Description: "The id of the instance flavor.", + MarkdownDescription: "The id of the instance flavor.", + }, + "max_gb": schema.Int64Attribute{ + Computed: true, + Description: "maximum storage which can be ordered for the flavor in Gigabyte.", + MarkdownDescription: "maximum storage which can be ordered for the flavor in Gigabyte.", + }, + "memory": schema.Int64Attribute{ + Computed: true, + Description: "The memory of the instance in Gibibyte.", + MarkdownDescription: "The memory of the instance in Gibibyte.", + }, + "min_gb": schema.Int64Attribute{ + Computed: true, + Description: "minimum storage which is required to order in Gigabyte.", + MarkdownDescription: "minimum storage which is required to order in Gigabyte.", + }, + "node_type": schema.StringAttribute{ + Computed: true, + Description: "defines the nodeType it can be either single or HA", + MarkdownDescription: "defines the nodeType it can be either single or HA", + }, + "storage_classes": schema.ListNestedAttribute{ + NestedObject: schema.NestedAttributeObject{ + Attributes: map[string]schema.Attribute{ + "class": schema.StringAttribute{ + Computed: true, + }, + "max_io_per_sec": schema.Int64Attribute{ + Computed: true, + }, + "max_through_in_mb": schema.Int64Attribute{ + Computed: true, + }, + }, + CustomType: StorageClassesType{ + ObjectType: types.ObjectType{ + AttrTypes: StorageClassesValue{}.AttributeTypes(ctx), + }, + }, + }, + Computed: true, + Description: "maximum storage which can be ordered for the flavor in Gigabyte.", + MarkdownDescription: "maximum storage which can be ordered for the flavor in Gigabyte.", + }, + }, + CustomType: FlavorsType{ + ObjectType: types.ObjectType{ + AttrTypes: FlavorsValue{}.AttributeTypes(ctx), + }, + }, + }, + Computed: true, + Description: "List of flavors available for the project.", + MarkdownDescription: "List of flavors available for the project.", + }, + "page": schema.Int64Attribute{ + Optional: true, + Computed: true, + Description: "Number of the page of items list to be returned.", + MarkdownDescription: "Number of the page of items list to be returned.", + }, + "pagination": schema.SingleNestedAttribute{ + Attributes: map[string]schema.Attribute{ + "page": schema.Int64Attribute{ + Computed: true, + }, + "size": schema.Int64Attribute{ + Computed: true, + }, + "sort": schema.StringAttribute{ + Computed: true, + }, + "total_pages": schema.Int64Attribute{ + Computed: true, + }, + "total_rows": schema.Int64Attribute{ + Computed: true, + }, + }, + CustomType: PaginationType{ + ObjectType: types.ObjectType{ + AttrTypes: PaginationValue{}.AttributeTypes(ctx), + }, + }, + Computed: true, + }, + "project_id": schema.StringAttribute{ + Required: true, + Description: "The STACKIT project ID.", + MarkdownDescription: "The STACKIT project ID.", + }, + "region": schema.StringAttribute{ + Required: true, + Description: "The region which should be addressed", + MarkdownDescription: "The region which should be addressed", + Validators: []validator.String{ + stringvalidator.OneOf( + "eu01", + ), + }, + }, + "size": schema.Int64Attribute{ + Optional: true, + Computed: true, + Description: "Number of items to be returned on each page.", + MarkdownDescription: "Number of items to be returned on each page.", + }, + "sort": schema.StringAttribute{ + Optional: true, + Computed: true, + Description: "Sorting of the flavors to be returned on each page.", + MarkdownDescription: "Sorting of the flavors to be returned on each page.", + Validators: []validator.String{ + stringvalidator.OneOf( + "index.desc", + "index.asc", + "cpu.desc", + "cpu.asc", + "flavor_description.asc", + "flavor_description.desc", + "id.desc", + "id.asc", + "size_max.desc", + "size_max.asc", + "ram.desc", + "ram.asc", + "size_min.desc", + "size_min.asc", + "storage_class.asc", + "storage_class.desc", + "node_type.asc", + "node_type.desc", + ), + }, + }, + }, + } +} + +type FlavorsModel struct { + Flavors types.List `tfsdk:"flavors"` + Page types.Int64 `tfsdk:"page"` + Pagination PaginationValue `tfsdk:"pagination"` + ProjectId types.String `tfsdk:"project_id"` + Region types.String `tfsdk:"region"` + Size types.Int64 `tfsdk:"size"` + Sort types.String `tfsdk:"sort"` +} + +var _ basetypes.ObjectTypable = FlavorsType{} + +type FlavorsType struct { + basetypes.ObjectType +} + +func (t FlavorsType) Equal(o attr.Type) bool { + other, ok := o.(FlavorsType) + + if !ok { + return false + } + + return t.ObjectType.Equal(other.ObjectType) +} + +func (t FlavorsType) String() string { + return "FlavorsType" +} + +func (t FlavorsType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) { + var diags diag.Diagnostics + + attributes := in.Attributes() + + cpuAttribute, ok := attributes["cpu"] + + if !ok { + diags.AddError( + "Attribute Missing", + `cpu is missing from object`) + + return nil, diags + } + + cpuVal, ok := cpuAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`cpu expected to be basetypes.Int64Value, was: %T`, cpuAttribute)) + } + + descriptionAttribute, ok := attributes["description"] + + if !ok { + diags.AddError( + "Attribute Missing", + `description is missing from object`) + + return nil, diags + } + + descriptionVal, ok := descriptionAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`description expected to be basetypes.StringValue, was: %T`, descriptionAttribute)) + } + + idAttribute, ok := attributes["id"] + + if !ok { + diags.AddError( + "Attribute Missing", + `id is missing from object`) + + return nil, diags + } + + idVal, ok := idAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`id expected to be basetypes.StringValue, was: %T`, idAttribute)) + } + + maxGbAttribute, ok := attributes["max_gb"] + + if !ok { + diags.AddError( + "Attribute Missing", + `max_gb is missing from object`) + + return nil, diags + } + + maxGbVal, ok := maxGbAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`max_gb expected to be basetypes.Int64Value, was: %T`, maxGbAttribute)) + } + + memoryAttribute, ok := attributes["memory"] + + if !ok { + diags.AddError( + "Attribute Missing", + `memory is missing from object`) + + return nil, diags + } + + memoryVal, ok := memoryAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`memory expected to be basetypes.Int64Value, was: %T`, memoryAttribute)) + } + + minGbAttribute, ok := attributes["min_gb"] + + if !ok { + diags.AddError( + "Attribute Missing", + `min_gb is missing from object`) + + return nil, diags + } + + minGbVal, ok := minGbAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`min_gb expected to be basetypes.Int64Value, was: %T`, minGbAttribute)) + } + + nodeTypeAttribute, ok := attributes["node_type"] + + if !ok { + diags.AddError( + "Attribute Missing", + `node_type is missing from object`) + + return nil, diags + } + + nodeTypeVal, ok := nodeTypeAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`node_type expected to be basetypes.StringValue, was: %T`, nodeTypeAttribute)) + } + + storageClassesAttribute, ok := attributes["storage_classes"] + + if !ok { + diags.AddError( + "Attribute Missing", + `storage_classes is missing from object`) + + return nil, diags + } + + storageClassesVal, ok := storageClassesAttribute.(basetypes.ListValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`storage_classes expected to be basetypes.ListValue, was: %T`, storageClassesAttribute)) + } + + if diags.HasError() { + return nil, diags + } + + return FlavorsValue{ + Cpu: cpuVal, + Description: descriptionVal, + Id: idVal, + MaxGb: maxGbVal, + Memory: memoryVal, + MinGb: minGbVal, + NodeType: nodeTypeVal, + StorageClasses: storageClassesVal, + state: attr.ValueStateKnown, + }, diags +} + +func NewFlavorsValueNull() FlavorsValue { + return FlavorsValue{ + state: attr.ValueStateNull, + } +} + +func NewFlavorsValueUnknown() FlavorsValue { + return FlavorsValue{ + state: attr.ValueStateUnknown, + } +} + +func NewFlavorsValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (FlavorsValue, diag.Diagnostics) { + var diags diag.Diagnostics + + // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521 + ctx := context.Background() + + for name, attributeType := range attributeTypes { + attribute, ok := attributes[name] + + if !ok { + diags.AddError( + "Missing FlavorsValue Attribute Value", + "While creating a FlavorsValue value, a missing attribute value was detected. "+ + "A FlavorsValue must contain values for all attributes, even if null or unknown. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("FlavorsValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()), + ) + + continue + } + + if !attributeType.Equal(attribute.Type(ctx)) { + diags.AddError( + "Invalid FlavorsValue Attribute Type", + "While creating a FlavorsValue value, an invalid attribute value was detected. "+ + "A FlavorsValue must use a matching attribute type for the value. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("FlavorsValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+ + fmt.Sprintf("FlavorsValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)), + ) + } + } + + for name := range attributes { + _, ok := attributeTypes[name] + + if !ok { + diags.AddError( + "Extra FlavorsValue Attribute Value", + "While creating a FlavorsValue value, an extra attribute value was detected. "+ + "A FlavorsValue must not contain values beyond the expected attribute types. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("Extra FlavorsValue Attribute Name: %s", name), + ) + } + } + + if diags.HasError() { + return NewFlavorsValueUnknown(), diags + } + + cpuAttribute, ok := attributes["cpu"] + + if !ok { + diags.AddError( + "Attribute Missing", + `cpu is missing from object`) + + return NewFlavorsValueUnknown(), diags + } + + cpuVal, ok := cpuAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`cpu expected to be basetypes.Int64Value, was: %T`, cpuAttribute)) + } + + descriptionAttribute, ok := attributes["description"] + + if !ok { + diags.AddError( + "Attribute Missing", + `description is missing from object`) + + return NewFlavorsValueUnknown(), diags + } + + descriptionVal, ok := descriptionAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`description expected to be basetypes.StringValue, was: %T`, descriptionAttribute)) + } + + idAttribute, ok := attributes["id"] + + if !ok { + diags.AddError( + "Attribute Missing", + `id is missing from object`) + + return NewFlavorsValueUnknown(), diags + } + + idVal, ok := idAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`id expected to be basetypes.StringValue, was: %T`, idAttribute)) + } + + maxGbAttribute, ok := attributes["max_gb"] + + if !ok { + diags.AddError( + "Attribute Missing", + `max_gb is missing from object`) + + return NewFlavorsValueUnknown(), diags + } + + maxGbVal, ok := maxGbAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`max_gb expected to be basetypes.Int64Value, was: %T`, maxGbAttribute)) + } + + memoryAttribute, ok := attributes["memory"] + + if !ok { + diags.AddError( + "Attribute Missing", + `memory is missing from object`) + + return NewFlavorsValueUnknown(), diags + } + + memoryVal, ok := memoryAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`memory expected to be basetypes.Int64Value, was: %T`, memoryAttribute)) + } + + minGbAttribute, ok := attributes["min_gb"] + + if !ok { + diags.AddError( + "Attribute Missing", + `min_gb is missing from object`) + + return NewFlavorsValueUnknown(), diags + } + + minGbVal, ok := minGbAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`min_gb expected to be basetypes.Int64Value, was: %T`, minGbAttribute)) + } + + nodeTypeAttribute, ok := attributes["node_type"] + + if !ok { + diags.AddError( + "Attribute Missing", + `node_type is missing from object`) + + return NewFlavorsValueUnknown(), diags + } + + nodeTypeVal, ok := nodeTypeAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`node_type expected to be basetypes.StringValue, was: %T`, nodeTypeAttribute)) + } + + storageClassesAttribute, ok := attributes["storage_classes"] + + if !ok { + diags.AddError( + "Attribute Missing", + `storage_classes is missing from object`) + + return NewFlavorsValueUnknown(), diags + } + + storageClassesVal, ok := storageClassesAttribute.(basetypes.ListValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`storage_classes expected to be basetypes.ListValue, was: %T`, storageClassesAttribute)) + } + + if diags.HasError() { + return NewFlavorsValueUnknown(), diags + } + + return FlavorsValue{ + Cpu: cpuVal, + Description: descriptionVal, + Id: idVal, + MaxGb: maxGbVal, + Memory: memoryVal, + MinGb: minGbVal, + NodeType: nodeTypeVal, + StorageClasses: storageClassesVal, + state: attr.ValueStateKnown, + }, diags +} + +func NewFlavorsValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) FlavorsValue { + object, diags := NewFlavorsValue(attributeTypes, attributes) + + if diags.HasError() { + // This could potentially be added to the diag package. + diagsStrings := make([]string, 0, len(diags)) + + for _, diagnostic := range diags { + diagsStrings = append(diagsStrings, fmt.Sprintf( + "%s | %s | %s", + diagnostic.Severity(), + diagnostic.Summary(), + diagnostic.Detail())) + } + + panic("NewFlavorsValueMust received error(s): " + strings.Join(diagsStrings, "\n")) + } + + return object +} + +func (t FlavorsType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) { + if in.Type() == nil { + return NewFlavorsValueNull(), nil + } + + if !in.Type().Equal(t.TerraformType(ctx)) { + return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type()) + } + + if !in.IsKnown() { + return NewFlavorsValueUnknown(), nil + } + + if in.IsNull() { + return NewFlavorsValueNull(), nil + } + + attributes := map[string]attr.Value{} + + val := map[string]tftypes.Value{} + + err := in.As(&val) + + if err != nil { + return nil, err + } + + for k, v := range val { + a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v) + + if err != nil { + return nil, err + } + + attributes[k] = a + } + + return NewFlavorsValueMust(FlavorsValue{}.AttributeTypes(ctx), attributes), nil +} + +func (t FlavorsType) ValueType(ctx context.Context) attr.Value { + return FlavorsValue{} +} + +var _ basetypes.ObjectValuable = FlavorsValue{} + +type FlavorsValue struct { + Cpu basetypes.Int64Value `tfsdk:"cpu"` + Description basetypes.StringValue `tfsdk:"description"` + Id basetypes.StringValue `tfsdk:"id"` + MaxGb basetypes.Int64Value `tfsdk:"max_gb"` + Memory basetypes.Int64Value `tfsdk:"memory"` + MinGb basetypes.Int64Value `tfsdk:"min_gb"` + NodeType basetypes.StringValue `tfsdk:"node_type"` + StorageClasses basetypes.ListValue `tfsdk:"storage_classes"` + state attr.ValueState +} + +func (v FlavorsValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) { + attrTypes := make(map[string]tftypes.Type, 8) + + var val tftypes.Value + var err error + + attrTypes["cpu"] = basetypes.Int64Type{}.TerraformType(ctx) + attrTypes["description"] = basetypes.StringType{}.TerraformType(ctx) + attrTypes["id"] = basetypes.StringType{}.TerraformType(ctx) + attrTypes["max_gb"] = basetypes.Int64Type{}.TerraformType(ctx) + attrTypes["memory"] = basetypes.Int64Type{}.TerraformType(ctx) + attrTypes["min_gb"] = basetypes.Int64Type{}.TerraformType(ctx) + attrTypes["node_type"] = basetypes.StringType{}.TerraformType(ctx) + attrTypes["storage_classes"] = basetypes.ListType{ + ElemType: StorageClassesValue{}.Type(ctx), + }.TerraformType(ctx) + + objectType := tftypes.Object{AttributeTypes: attrTypes} + + switch v.state { + case attr.ValueStateKnown: + vals := make(map[string]tftypes.Value, 8) + + val, err = v.Cpu.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["cpu"] = val + + val, err = v.Description.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["description"] = val + + val, err = v.Id.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["id"] = val + + val, err = v.MaxGb.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["max_gb"] = val + + val, err = v.Memory.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["memory"] = val + + val, err = v.MinGb.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["min_gb"] = val + + val, err = v.NodeType.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["node_type"] = val + + val, err = v.StorageClasses.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["storage_classes"] = val + + if err := tftypes.ValidateValue(objectType, vals); err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + return tftypes.NewValue(objectType, vals), nil + case attr.ValueStateNull: + return tftypes.NewValue(objectType, nil), nil + case attr.ValueStateUnknown: + return tftypes.NewValue(objectType, tftypes.UnknownValue), nil + default: + panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state)) + } +} + +func (v FlavorsValue) IsNull() bool { + return v.state == attr.ValueStateNull +} + +func (v FlavorsValue) IsUnknown() bool { + return v.state == attr.ValueStateUnknown +} + +func (v FlavorsValue) String() string { + return "FlavorsValue" +} + +func (v FlavorsValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) { + var diags diag.Diagnostics + + storageClasses := types.ListValueMust( + StorageClassesType{ + basetypes.ObjectType{ + AttrTypes: StorageClassesValue{}.AttributeTypes(ctx), + }, + }, + v.StorageClasses.Elements(), + ) + + if v.StorageClasses.IsNull() { + storageClasses = types.ListNull( + StorageClassesType{ + basetypes.ObjectType{ + AttrTypes: StorageClassesValue{}.AttributeTypes(ctx), + }, + }, + ) + } + + if v.StorageClasses.IsUnknown() { + storageClasses = types.ListUnknown( + StorageClassesType{ + basetypes.ObjectType{ + AttrTypes: StorageClassesValue{}.AttributeTypes(ctx), + }, + }, + ) + } + + attributeTypes := map[string]attr.Type{ + "cpu": basetypes.Int64Type{}, + "description": basetypes.StringType{}, + "id": basetypes.StringType{}, + "max_gb": basetypes.Int64Type{}, + "memory": basetypes.Int64Type{}, + "min_gb": basetypes.Int64Type{}, + "node_type": basetypes.StringType{}, + "storage_classes": basetypes.ListType{ + ElemType: StorageClassesValue{}.Type(ctx), + }, + } + + if v.IsNull() { + return types.ObjectNull(attributeTypes), diags + } + + if v.IsUnknown() { + return types.ObjectUnknown(attributeTypes), diags + } + + objVal, diags := types.ObjectValue( + attributeTypes, + map[string]attr.Value{ + "cpu": v.Cpu, + "description": v.Description, + "id": v.Id, + "max_gb": v.MaxGb, + "memory": v.Memory, + "min_gb": v.MinGb, + "node_type": v.NodeType, + "storage_classes": storageClasses, + }) + + return objVal, diags +} + +func (v FlavorsValue) Equal(o attr.Value) bool { + other, ok := o.(FlavorsValue) + + if !ok { + return false + } + + if v.state != other.state { + return false + } + + if v.state != attr.ValueStateKnown { + return true + } + + if !v.Cpu.Equal(other.Cpu) { + return false + } + + if !v.Description.Equal(other.Description) { + return false + } + + if !v.Id.Equal(other.Id) { + return false + } + + if !v.MaxGb.Equal(other.MaxGb) { + return false + } + + if !v.Memory.Equal(other.Memory) { + return false + } + + if !v.MinGb.Equal(other.MinGb) { + return false + } + + if !v.NodeType.Equal(other.NodeType) { + return false + } + + if !v.StorageClasses.Equal(other.StorageClasses) { + return false + } + + return true +} + +func (v FlavorsValue) Type(ctx context.Context) attr.Type { + return FlavorsType{ + basetypes.ObjectType{ + AttrTypes: v.AttributeTypes(ctx), + }, + } +} + +func (v FlavorsValue) AttributeTypes(ctx context.Context) map[string]attr.Type { + return map[string]attr.Type{ + "cpu": basetypes.Int64Type{}, + "description": basetypes.StringType{}, + "id": basetypes.StringType{}, + "max_gb": basetypes.Int64Type{}, + "memory": basetypes.Int64Type{}, + "min_gb": basetypes.Int64Type{}, + "node_type": basetypes.StringType{}, + "storage_classes": basetypes.ListType{ + ElemType: StorageClassesValue{}.Type(ctx), + }, + } +} + +var _ basetypes.ObjectTypable = StorageClassesType{} + +type StorageClassesType struct { + basetypes.ObjectType +} + +func (t StorageClassesType) Equal(o attr.Type) bool { + other, ok := o.(StorageClassesType) + + if !ok { + return false + } + + return t.ObjectType.Equal(other.ObjectType) +} + +func (t StorageClassesType) String() string { + return "StorageClassesType" +} + +func (t StorageClassesType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) { + var diags diag.Diagnostics + + attributes := in.Attributes() + + classAttribute, ok := attributes["class"] + + if !ok { + diags.AddError( + "Attribute Missing", + `class is missing from object`) + + return nil, diags + } + + classVal, ok := classAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`class expected to be basetypes.StringValue, was: %T`, classAttribute)) + } + + maxIoPerSecAttribute, ok := attributes["max_io_per_sec"] + + if !ok { + diags.AddError( + "Attribute Missing", + `max_io_per_sec is missing from object`) + + return nil, diags + } + + maxIoPerSecVal, ok := maxIoPerSecAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`max_io_per_sec expected to be basetypes.Int64Value, was: %T`, maxIoPerSecAttribute)) + } + + maxThroughInMbAttribute, ok := attributes["max_through_in_mb"] + + if !ok { + diags.AddError( + "Attribute Missing", + `max_through_in_mb is missing from object`) + + return nil, diags + } + + maxThroughInMbVal, ok := maxThroughInMbAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`max_through_in_mb expected to be basetypes.Int64Value, was: %T`, maxThroughInMbAttribute)) + } + + if diags.HasError() { + return nil, diags + } + + return StorageClassesValue{ + Class: classVal, + MaxIoPerSec: maxIoPerSecVal, + MaxThroughInMb: maxThroughInMbVal, + state: attr.ValueStateKnown, + }, diags +} + +func NewStorageClassesValueNull() StorageClassesValue { + return StorageClassesValue{ + state: attr.ValueStateNull, + } +} + +func NewStorageClassesValueUnknown() StorageClassesValue { + return StorageClassesValue{ + state: attr.ValueStateUnknown, + } +} + +func NewStorageClassesValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (StorageClassesValue, diag.Diagnostics) { + var diags diag.Diagnostics + + // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521 + ctx := context.Background() + + for name, attributeType := range attributeTypes { + attribute, ok := attributes[name] + + if !ok { + diags.AddError( + "Missing StorageClassesValue Attribute Value", + "While creating a StorageClassesValue value, a missing attribute value was detected. "+ + "A StorageClassesValue must contain values for all attributes, even if null or unknown. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("StorageClassesValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()), + ) + + continue + } + + if !attributeType.Equal(attribute.Type(ctx)) { + diags.AddError( + "Invalid StorageClassesValue Attribute Type", + "While creating a StorageClassesValue value, an invalid attribute value was detected. "+ + "A StorageClassesValue must use a matching attribute type for the value. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("StorageClassesValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+ + fmt.Sprintf("StorageClassesValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)), + ) + } + } + + for name := range attributes { + _, ok := attributeTypes[name] + + if !ok { + diags.AddError( + "Extra StorageClassesValue Attribute Value", + "While creating a StorageClassesValue value, an extra attribute value was detected. "+ + "A StorageClassesValue must not contain values beyond the expected attribute types. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("Extra StorageClassesValue Attribute Name: %s", name), + ) + } + } + + if diags.HasError() { + return NewStorageClassesValueUnknown(), diags + } + + classAttribute, ok := attributes["class"] + + if !ok { + diags.AddError( + "Attribute Missing", + `class is missing from object`) + + return NewStorageClassesValueUnknown(), diags + } + + classVal, ok := classAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`class expected to be basetypes.StringValue, was: %T`, classAttribute)) + } + + maxIoPerSecAttribute, ok := attributes["max_io_per_sec"] + + if !ok { + diags.AddError( + "Attribute Missing", + `max_io_per_sec is missing from object`) + + return NewStorageClassesValueUnknown(), diags + } + + maxIoPerSecVal, ok := maxIoPerSecAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`max_io_per_sec expected to be basetypes.Int64Value, was: %T`, maxIoPerSecAttribute)) + } + + maxThroughInMbAttribute, ok := attributes["max_through_in_mb"] + + if !ok { + diags.AddError( + "Attribute Missing", + `max_through_in_mb is missing from object`) + + return NewStorageClassesValueUnknown(), diags + } + + maxThroughInMbVal, ok := maxThroughInMbAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`max_through_in_mb expected to be basetypes.Int64Value, was: %T`, maxThroughInMbAttribute)) + } + + if diags.HasError() { + return NewStorageClassesValueUnknown(), diags + } + + return StorageClassesValue{ + Class: classVal, + MaxIoPerSec: maxIoPerSecVal, + MaxThroughInMb: maxThroughInMbVal, + state: attr.ValueStateKnown, + }, diags +} + +func NewStorageClassesValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) StorageClassesValue { + object, diags := NewStorageClassesValue(attributeTypes, attributes) + + if diags.HasError() { + // This could potentially be added to the diag package. + diagsStrings := make([]string, 0, len(diags)) + + for _, diagnostic := range diags { + diagsStrings = append(diagsStrings, fmt.Sprintf( + "%s | %s | %s", + diagnostic.Severity(), + diagnostic.Summary(), + diagnostic.Detail())) + } + + panic("NewStorageClassesValueMust received error(s): " + strings.Join(diagsStrings, "\n")) + } + + return object +} + +func (t StorageClassesType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) { + if in.Type() == nil { + return NewStorageClassesValueNull(), nil + } + + if !in.Type().Equal(t.TerraformType(ctx)) { + return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type()) + } + + if !in.IsKnown() { + return NewStorageClassesValueUnknown(), nil + } + + if in.IsNull() { + return NewStorageClassesValueNull(), nil + } + + attributes := map[string]attr.Value{} + + val := map[string]tftypes.Value{} + + err := in.As(&val) + + if err != nil { + return nil, err + } + + for k, v := range val { + a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v) + + if err != nil { + return nil, err + } + + attributes[k] = a + } + + return NewStorageClassesValueMust(StorageClassesValue{}.AttributeTypes(ctx), attributes), nil +} + +func (t StorageClassesType) ValueType(ctx context.Context) attr.Value { + return StorageClassesValue{} +} + +var _ basetypes.ObjectValuable = StorageClassesValue{} + +type StorageClassesValue struct { + Class basetypes.StringValue `tfsdk:"class"` + MaxIoPerSec basetypes.Int64Value `tfsdk:"max_io_per_sec"` + MaxThroughInMb basetypes.Int64Value `tfsdk:"max_through_in_mb"` + state attr.ValueState +} + +func (v StorageClassesValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) { + attrTypes := make(map[string]tftypes.Type, 3) + + var val tftypes.Value + var err error + + attrTypes["class"] = basetypes.StringType{}.TerraformType(ctx) + attrTypes["max_io_per_sec"] = basetypes.Int64Type{}.TerraformType(ctx) + attrTypes["max_through_in_mb"] = basetypes.Int64Type{}.TerraformType(ctx) + + objectType := tftypes.Object{AttributeTypes: attrTypes} + + switch v.state { + case attr.ValueStateKnown: + vals := make(map[string]tftypes.Value, 3) + + val, err = v.Class.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["class"] = val + + val, err = v.MaxIoPerSec.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["max_io_per_sec"] = val + + val, err = v.MaxThroughInMb.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["max_through_in_mb"] = val + + if err := tftypes.ValidateValue(objectType, vals); err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + return tftypes.NewValue(objectType, vals), nil + case attr.ValueStateNull: + return tftypes.NewValue(objectType, nil), nil + case attr.ValueStateUnknown: + return tftypes.NewValue(objectType, tftypes.UnknownValue), nil + default: + panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state)) + } +} + +func (v StorageClassesValue) IsNull() bool { + return v.state == attr.ValueStateNull +} + +func (v StorageClassesValue) IsUnknown() bool { + return v.state == attr.ValueStateUnknown +} + +func (v StorageClassesValue) String() string { + return "StorageClassesValue" +} + +func (v StorageClassesValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) { + var diags diag.Diagnostics + + attributeTypes := map[string]attr.Type{ + "class": basetypes.StringType{}, + "max_io_per_sec": basetypes.Int64Type{}, + "max_through_in_mb": basetypes.Int64Type{}, + } + + if v.IsNull() { + return types.ObjectNull(attributeTypes), diags + } + + if v.IsUnknown() { + return types.ObjectUnknown(attributeTypes), diags + } + + objVal, diags := types.ObjectValue( + attributeTypes, + map[string]attr.Value{ + "class": v.Class, + "max_io_per_sec": v.MaxIoPerSec, + "max_through_in_mb": v.MaxThroughInMb, + }) + + return objVal, diags +} + +func (v StorageClassesValue) Equal(o attr.Value) bool { + other, ok := o.(StorageClassesValue) + + if !ok { + return false + } + + if v.state != other.state { + return false + } + + if v.state != attr.ValueStateKnown { + return true + } + + if !v.Class.Equal(other.Class) { + return false + } + + if !v.MaxIoPerSec.Equal(other.MaxIoPerSec) { + return false + } + + if !v.MaxThroughInMb.Equal(other.MaxThroughInMb) { + return false + } + + return true +} + +func (v StorageClassesValue) Type(ctx context.Context) attr.Type { + return StorageClassesType{ + basetypes.ObjectType{ + AttrTypes: v.AttributeTypes(ctx), + }, + } +} + +func (v StorageClassesValue) AttributeTypes(ctx context.Context) map[string]attr.Type { + return map[string]attr.Type{ + "class": basetypes.StringType{}, + "max_io_per_sec": basetypes.Int64Type{}, + "max_through_in_mb": basetypes.Int64Type{}, + } +} + +var _ basetypes.ObjectTypable = PaginationType{} + +type PaginationType struct { + basetypes.ObjectType +} + +func (t PaginationType) Equal(o attr.Type) bool { + other, ok := o.(PaginationType) + + if !ok { + return false + } + + return t.ObjectType.Equal(other.ObjectType) +} + +func (t PaginationType) String() string { + return "PaginationType" +} + +func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) { + var diags diag.Diagnostics + + attributes := in.Attributes() + + pageAttribute, ok := attributes["page"] + + if !ok { + diags.AddError( + "Attribute Missing", + `page is missing from object`) + + return nil, diags + } + + pageVal, ok := pageAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute)) + } + + sizeAttribute, ok := attributes["size"] + + if !ok { + diags.AddError( + "Attribute Missing", + `size is missing from object`) + + return nil, diags + } + + sizeVal, ok := sizeAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute)) + } + + sortAttribute, ok := attributes["sort"] + + if !ok { + diags.AddError( + "Attribute Missing", + `sort is missing from object`) + + return nil, diags + } + + sortVal, ok := sortAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`sort expected to be basetypes.StringValue, was: %T`, sortAttribute)) + } + + totalPagesAttribute, ok := attributes["total_pages"] + + if !ok { + diags.AddError( + "Attribute Missing", + `total_pages is missing from object`) + + return nil, diags + } + + totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute)) + } + + totalRowsAttribute, ok := attributes["total_rows"] + + if !ok { + diags.AddError( + "Attribute Missing", + `total_rows is missing from object`) + + return nil, diags + } + + totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute)) + } + + if diags.HasError() { + return nil, diags + } + + return PaginationValue{ + Page: pageVal, + Size: sizeVal, + Sort: sortVal, + TotalPages: totalPagesVal, + TotalRows: totalRowsVal, + state: attr.ValueStateKnown, + }, diags +} + +func NewPaginationValueNull() PaginationValue { + return PaginationValue{ + state: attr.ValueStateNull, + } +} + +func NewPaginationValueUnknown() PaginationValue { + return PaginationValue{ + state: attr.ValueStateUnknown, + } +} + +func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (PaginationValue, diag.Diagnostics) { + var diags diag.Diagnostics + + // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521 + ctx := context.Background() + + for name, attributeType := range attributeTypes { + attribute, ok := attributes[name] + + if !ok { + diags.AddError( + "Missing PaginationValue Attribute Value", + "While creating a PaginationValue value, a missing attribute value was detected. "+ + "A PaginationValue must contain values for all attributes, even if null or unknown. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("PaginationValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()), + ) + + continue + } + + if !attributeType.Equal(attribute.Type(ctx)) { + diags.AddError( + "Invalid PaginationValue Attribute Type", + "While creating a PaginationValue value, an invalid attribute value was detected. "+ + "A PaginationValue must use a matching attribute type for the value. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("PaginationValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+ + fmt.Sprintf("PaginationValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)), + ) + } + } + + for name := range attributes { + _, ok := attributeTypes[name] + + if !ok { + diags.AddError( + "Extra PaginationValue Attribute Value", + "While creating a PaginationValue value, an extra attribute value was detected. "+ + "A PaginationValue must not contain values beyond the expected attribute types. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("Extra PaginationValue Attribute Name: %s", name), + ) + } + } + + if diags.HasError() { + return NewPaginationValueUnknown(), diags + } + + pageAttribute, ok := attributes["page"] + + if !ok { + diags.AddError( + "Attribute Missing", + `page is missing from object`) + + return NewPaginationValueUnknown(), diags + } + + pageVal, ok := pageAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute)) + } + + sizeAttribute, ok := attributes["size"] + + if !ok { + diags.AddError( + "Attribute Missing", + `size is missing from object`) + + return NewPaginationValueUnknown(), diags + } + + sizeVal, ok := sizeAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute)) + } + + sortAttribute, ok := attributes["sort"] + + if !ok { + diags.AddError( + "Attribute Missing", + `sort is missing from object`) + + return NewPaginationValueUnknown(), diags + } + + sortVal, ok := sortAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`sort expected to be basetypes.StringValue, was: %T`, sortAttribute)) + } + + totalPagesAttribute, ok := attributes["total_pages"] + + if !ok { + diags.AddError( + "Attribute Missing", + `total_pages is missing from object`) + + return NewPaginationValueUnknown(), diags + } + + totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute)) + } + + totalRowsAttribute, ok := attributes["total_rows"] + + if !ok { + diags.AddError( + "Attribute Missing", + `total_rows is missing from object`) + + return NewPaginationValueUnknown(), diags + } + + totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute)) + } + + if diags.HasError() { + return NewPaginationValueUnknown(), diags + } + + return PaginationValue{ + Page: pageVal, + Size: sizeVal, + Sort: sortVal, + TotalPages: totalPagesVal, + TotalRows: totalRowsVal, + state: attr.ValueStateKnown, + }, diags +} + +func NewPaginationValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) PaginationValue { + object, diags := NewPaginationValue(attributeTypes, attributes) + + if diags.HasError() { + // This could potentially be added to the diag package. + diagsStrings := make([]string, 0, len(diags)) + + for _, diagnostic := range diags { + diagsStrings = append(diagsStrings, fmt.Sprintf( + "%s | %s | %s", + diagnostic.Severity(), + diagnostic.Summary(), + diagnostic.Detail())) + } + + panic("NewPaginationValueMust received error(s): " + strings.Join(diagsStrings, "\n")) + } + + return object +} + +func (t PaginationType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) { + if in.Type() == nil { + return NewPaginationValueNull(), nil + } + + if !in.Type().Equal(t.TerraformType(ctx)) { + return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type()) + } + + if !in.IsKnown() { + return NewPaginationValueUnknown(), nil + } + + if in.IsNull() { + return NewPaginationValueNull(), nil + } + + attributes := map[string]attr.Value{} + + val := map[string]tftypes.Value{} + + err := in.As(&val) + + if err != nil { + return nil, err + } + + for k, v := range val { + a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v) + + if err != nil { + return nil, err + } + + attributes[k] = a + } + + return NewPaginationValueMust(PaginationValue{}.AttributeTypes(ctx), attributes), nil +} + +func (t PaginationType) ValueType(ctx context.Context) attr.Value { + return PaginationValue{} +} + +var _ basetypes.ObjectValuable = PaginationValue{} + +type PaginationValue struct { + Page basetypes.Int64Value `tfsdk:"page"` + Size basetypes.Int64Value `tfsdk:"size"` + Sort basetypes.StringValue `tfsdk:"sort"` + TotalPages basetypes.Int64Value `tfsdk:"total_pages"` + TotalRows basetypes.Int64Value `tfsdk:"total_rows"` + state attr.ValueState +} + +func (v PaginationValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) { + attrTypes := make(map[string]tftypes.Type, 5) + + var val tftypes.Value + var err error + + attrTypes["page"] = basetypes.Int64Type{}.TerraformType(ctx) + attrTypes["size"] = basetypes.Int64Type{}.TerraformType(ctx) + attrTypes["sort"] = basetypes.StringType{}.TerraformType(ctx) + attrTypes["total_pages"] = basetypes.Int64Type{}.TerraformType(ctx) + attrTypes["total_rows"] = basetypes.Int64Type{}.TerraformType(ctx) + + objectType := tftypes.Object{AttributeTypes: attrTypes} + + switch v.state { + case attr.ValueStateKnown: + vals := make(map[string]tftypes.Value, 5) + + val, err = v.Page.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["page"] = val + + val, err = v.Size.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["size"] = val + + val, err = v.Sort.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["sort"] = val + + val, err = v.TotalPages.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["total_pages"] = val + + val, err = v.TotalRows.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["total_rows"] = val + + if err := tftypes.ValidateValue(objectType, vals); err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + return tftypes.NewValue(objectType, vals), nil + case attr.ValueStateNull: + return tftypes.NewValue(objectType, nil), nil + case attr.ValueStateUnknown: + return tftypes.NewValue(objectType, tftypes.UnknownValue), nil + default: + panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state)) + } +} + +func (v PaginationValue) IsNull() bool { + return v.state == attr.ValueStateNull +} + +func (v PaginationValue) IsUnknown() bool { + return v.state == attr.ValueStateUnknown +} + +func (v PaginationValue) String() string { + return "PaginationValue" +} + +func (v PaginationValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) { + var diags diag.Diagnostics + + attributeTypes := map[string]attr.Type{ + "page": basetypes.Int64Type{}, + "size": basetypes.Int64Type{}, + "sort": basetypes.StringType{}, + "total_pages": basetypes.Int64Type{}, + "total_rows": basetypes.Int64Type{}, + } + + if v.IsNull() { + return types.ObjectNull(attributeTypes), diags + } + + if v.IsUnknown() { + return types.ObjectUnknown(attributeTypes), diags + } + + objVal, diags := types.ObjectValue( + attributeTypes, + map[string]attr.Value{ + "page": v.Page, + "size": v.Size, + "sort": v.Sort, + "total_pages": v.TotalPages, + "total_rows": v.TotalRows, + }) + + return objVal, diags +} + +func (v PaginationValue) Equal(o attr.Value) bool { + other, ok := o.(PaginationValue) + + if !ok { + return false + } + + if v.state != other.state { + return false + } + + if v.state != attr.ValueStateKnown { + return true + } + + if !v.Page.Equal(other.Page) { + return false + } + + if !v.Size.Equal(other.Size) { + return false + } + + if !v.Sort.Equal(other.Sort) { + return false + } + + if !v.TotalPages.Equal(other.TotalPages) { + return false + } + + if !v.TotalRows.Equal(other.TotalRows) { + return false + } + + return true +} + +func (v PaginationValue) Type(ctx context.Context) attr.Type { + return PaginationType{ + basetypes.ObjectType{ + AttrTypes: v.AttributeTypes(ctx), + }, + } +} + +func (v PaginationValue) AttributeTypes(ctx context.Context) map[string]attr.Type { + return map[string]attr.Type{ + "page": basetypes.Int64Type{}, + "size": basetypes.Int64Type{}, + "sort": basetypes.StringType{}, + "total_pages": basetypes.Int64Type{}, + "total_rows": basetypes.Int64Type{}, + } +} diff --git a/stackit/internal/services/sqlserverflexbeta/instance/datasource.go b/stackit/internal/services/sqlserverflexbeta/instance/datasource.go index 842a4cfd..85834b26 100644 --- a/stackit/internal/services/sqlserverflexbeta/instance/datasource.go +++ b/stackit/internal/services/sqlserverflexbeta/instance/datasource.go @@ -6,6 +6,7 @@ import ( "net/http" "github.com/hashicorp/terraform-plugin-framework/datasource" + "github.com/hashicorp/terraform-plugin-framework/types" "github.com/hashicorp/terraform-plugin-log/tflog" "github.com/stackitcloud/stackit-sdk-go/core/config" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" @@ -25,12 +26,22 @@ func NewInstanceDataSource() datasource.DataSource { return &instanceDataSource{} } +// dataSourceModel maps the data source schema data. +type dataSourceModel struct { + sqlserverflexbetaGen.InstanceModel + TerraformID types.String `tfsdk:"id"` +} + type instanceDataSource struct { client *sqlserverflexbetaPkg.APIClient providerData core.ProviderData } -func (d *instanceDataSource) Metadata(_ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) { +func (d *instanceDataSource) Metadata( + _ context.Context, + req datasource.MetadataRequest, + resp *datasource.MetadataResponse, +) { resp.TypeName = req.ProviderTypeName + "_sqlserverflexbeta_instance" } @@ -81,7 +92,7 @@ func (d *instanceDataSource) Configure( } func (d *instanceDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { - var data sqlserverflexbetaGen.InstanceModel + var data dataSourceModel // Read Terraform configuration data into the model resp.Diagnostics.Append(req.Config.Get(ctx, &data)...) diff --git a/stackit/internal/services/sqlserverflexbeta/instance/datasources_gen/instance_data_source_gen.go b/stackit/internal/services/sqlserverflexbeta/instance/datasources_gen/instance_data_source_gen.go index 87476c3c..f3226581 100644 --- a/stackit/internal/services/sqlserverflexbeta/instance/datasources_gen/instance_data_source_gen.go +++ b/stackit/internal/services/sqlserverflexbeta/instance/datasources_gen/instance_data_source_gen.go @@ -65,7 +65,7 @@ func InstanceDataSourceSchema(ctx context.Context) schema.Schema { Description: "The id of the instance flavor.", MarkdownDescription: "The id of the instance flavor.", }, - "id": schema.StringAttribute{ + "tf_original_api_id": schema.StringAttribute{ Computed: true, Description: "The ID of the instance.", MarkdownDescription: "The ID of the instance.", @@ -178,7 +178,7 @@ type InstanceModel struct { Edition types.String `tfsdk:"edition"` Encryption EncryptionValue `tfsdk:"encryption"` FlavorId types.String `tfsdk:"flavor_id"` - Id types.String `tfsdk:"id"` + Id types.String `tfsdk:"tf_original_api_id"` InstanceId types.String `tfsdk:"instance_id"` IsDeletable types.Bool `tfsdk:"is_deletable"` Name types.String `tfsdk:"name"` diff --git a/stackit/internal/services/sqlserverflexbeta/instance/functions.go b/stackit/internal/services/sqlserverflexbeta/instance/functions.go index e9e1db57..25f3af0c 100644 --- a/stackit/internal/services/sqlserverflexbeta/instance/functions.go +++ b/stackit/internal/services/sqlserverflexbeta/instance/functions.go @@ -84,7 +84,7 @@ func mapResponseToModel( func mapDataResponseToModel( ctx context.Context, resp *sqlserverflexbeta.GetInstanceResponse, - m *sqlserverflexbetaDataGen.InstanceModel, + m *dataSourceModel, tfDiags diag.Diagnostics, ) error { m.BackupSchedule = types.StringValue(resp.GetBackupSchedule()) @@ -181,7 +181,7 @@ func handleEncryption( } func handleDSEncryption( - m *sqlserverflexbetaDataGen.InstanceModel, + m *dataSourceModel, resp *sqlserverflexbeta.GetInstanceResponse, ) sqlserverflexbetaDataGen.EncryptionValue { if !resp.HasEncryption() || diff --git a/stackit/internal/services/sqlserverflexbeta/instance/resource.go b/stackit/internal/services/sqlserverflexbeta/instance/resource.go index 5d0b47d8..5ccfa3c0 100644 --- a/stackit/internal/services/sqlserverflexbeta/instance/resource.go +++ b/stackit/internal/services/sqlserverflexbeta/instance/resource.go @@ -43,21 +43,48 @@ type instanceResource struct { providerData core.ProviderData } +// resourceModel describes the resource data model. +type resourceModel = sqlserverflexbetaResGen.InstanceModel + type InstanceResourceIdentityModel struct { ProjectID types.String `tfsdk:"project_id"` Region types.String `tfsdk:"region"` InstanceID types.String `tfsdk:"instance_id"` } -func (r *instanceResource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) { +func (r *instanceResource) Metadata( + ctx context.Context, + req resource.MetadataRequest, + resp *resource.MetadataResponse, +) { resp.TypeName = req.ProviderTypeName + "_sqlserverflexbeta_instance" } +//go:embed planModifiers.yaml +var modifiersFileByte []byte + func (r *instanceResource) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) { - resp.Schema = sqlserverflexbetaResGen.InstanceResourceSchema(ctx) + s := sqlserverflexbetaResGen.InstanceResourceSchema(ctx) + + fields, err := utils.ReadModifiersConfig(modifiersFileByte) + if err != nil { + resp.Diagnostics.AddError("error during read modifiers config file", err.Error()) + return + } + + err = utils.AddPlanModifiersToResourceSchema(fields, &s) + if err != nil { + resp.Diagnostics.AddError("error adding plan modifiers", err.Error()) + return + } + resp.Schema = s } -func (r *instanceResource) IdentitySchema(_ context.Context, _ resource.IdentitySchemaRequest, resp *resource.IdentitySchemaResponse) { +func (r *instanceResource) IdentitySchema( + _ context.Context, + _ resource.IdentitySchemaRequest, + resp *resource.IdentitySchemaResponse, +) { resp.IdentitySchema = identityschema.Schema{ Attributes: map[string]identityschema.Attribute{ "project_id": identityschema.StringAttribute{ @@ -90,7 +117,10 @@ func (r *instanceResource) Configure( utils.UserAgentConfigOption(r.providerData.Version), } if r.providerData.SQLServerFlexCustomEndpoint != "" { - apiClientConfigOptions = append(apiClientConfigOptions, config.WithEndpoint(r.providerData.SQLServerFlexCustomEndpoint)) + apiClientConfigOptions = append( + apiClientConfigOptions, + config.WithEndpoint(r.providerData.SQLServerFlexCustomEndpoint), + ) } else { apiClientConfigOptions = append(apiClientConfigOptions, config.WithRegion(r.providerData.GetRegion())) } @@ -121,7 +151,7 @@ func (r *instanceResource) ModifyPlan( if req.Config.Raw.IsNull() { return } - var configModel sqlserverflexbetaResGen.InstanceModel + var configModel resourceModel resp.Diagnostics.Append(req.Config.Get(ctx, &configModel)...) if resp.Diagnostics.HasError() { return @@ -147,11 +177,8 @@ func (r *instanceResource) ModifyPlan( } } -//go:embed planModifiers.yaml -var modifiersFileByte []byte - func (r *instanceResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) { - var data sqlserverflexbetaResGen.InstanceModel + var data resourceModel crateErr := "[SQL Server Flex BETA - Create] error" // Read Terraform plan data into the model @@ -257,7 +284,7 @@ func (r *instanceResource) Create(ctx context.Context, req resource.CreateReques } func (r *instanceResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) { - var data sqlserverflexbetaResGen.InstanceModel + var data resourceModel // Read Terraform prior state data into the model resp.Diagnostics.Append(req.State.Get(ctx, &data)...) @@ -324,7 +351,7 @@ func (r *instanceResource) Read(ctx context.Context, req resource.ReadRequest, r } func (r *instanceResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) { - var data sqlserverflexbetaResGen.InstanceModel + var data resourceModel updateInstanceError := "Error updating instance" resp.Diagnostics.Append(req.Plan.Get(ctx, &data)...) @@ -411,7 +438,7 @@ func (r *instanceResource) Update(ctx context.Context, req resource.UpdateReques } func (r *instanceResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) { - var data sqlserverflexbetaResGen.InstanceModel + var data resourceModel // Read Terraform prior state data into the model resp.Diagnostics.Append(req.State.Get(ctx, &data)...) @@ -484,9 +511,13 @@ func (r *instanceResource) ImportState( idParts := strings.Split(req.ID, core.Separator) if len(idParts) != 3 || idParts[0] == "" || idParts[1] == "" || idParts[2] == "" { - core.LogAndAddError(ctx, &resp.Diagnostics, + core.LogAndAddError( + ctx, &resp.Diagnostics, "Error importing instance", - fmt.Sprintf("Expected import identifier with format: [project_id],[region],[instance_id] Got: %q", req.ID), + fmt.Sprintf( + "Expected import identifier with format: [project_id],[region],[instance_id] Got: %q", + req.ID, + ), ) return } @@ -497,25 +528,20 @@ func (r *instanceResource) ImportState( return } + // If no ID is provided, attempt to read identity attributes from the import configuration var identityData InstanceResourceIdentityModel resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...) if resp.Diagnostics.HasError() { return } - resp.Diagnostics.Append( - resp.State.SetAttribute( - ctx, - path.Root("id"), - utils.BuildInternalTerraformId( - identityData.ProjectID.ValueString(), - identityData.Region.ValueString(), - identityData.InstanceID.ValueString(), - ), - )...) - resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), identityData.ProjectID.ValueString())...) - resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), identityData.Region.ValueString())...) - resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), identityData.InstanceID.ValueString())...) + projectId := identityData.ProjectID.ValueString() + region := identityData.Region.ValueString() + instanceId := identityData.InstanceID.ValueString() + + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), projectId)...) + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), region)...) + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), instanceId)...) tflog.Info(ctx, "Sqlserverflexbeta instance state imported") } diff --git a/stackit/internal/services/sqlserverflexbeta/instance/resource_test.go b/stackit/internal/services/sqlserverflexbeta/instance/resource_test.go index 64acf850..effced4e 100644 --- a/stackit/internal/services/sqlserverflexbeta/instance/resource_test.go +++ b/stackit/internal/services/sqlserverflexbeta/instance/resource_test.go @@ -13,6 +13,7 @@ import ( "github.com/hashicorp/terraform-plugin-testing/helper/acctest" "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-plugin-testing/knownvalue" + "github.com/hashicorp/terraform-plugin-testing/plancheck" "github.com/hashicorp/terraform-plugin-testing/statecheck" "github.com/hashicorp/terraform-plugin-testing/terraform" "github.com/hashicorp/terraform-plugin-testing/tfjsonpath" @@ -160,6 +161,12 @@ func TestAccResourceExample_basic(t *testing.T) { // test create { Config: exBefore, + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction(resName, plancheck.ResourceActionCreate), + plancheck.ExpectNonEmptyPlan(), + }, + }, ConfigStateChecks: []statecheck.StateCheck{ compareValuesSame.AddStateValue( resName, diff --git a/stackit/internal/services/sqlserverflexbeta/user/datasource.go b/stackit/internal/services/sqlserverflexbeta/user/datasource.go index df1a8033..e6491a0f 100644 --- a/stackit/internal/services/sqlserverflexbeta/user/datasource.go +++ b/stackit/internal/services/sqlserverflexbeta/user/datasource.go @@ -6,6 +6,7 @@ import ( "net/http" "github.com/hashicorp/terraform-plugin-framework/datasource" + "github.com/hashicorp/terraform-plugin-framework/types" "github.com/hashicorp/terraform-plugin-log/tflog" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" @@ -25,12 +26,30 @@ func NewUserDataSource() datasource.DataSource { return &userDataSource{} } +type dataSourceModel struct { + DefaultDatabase types.String `tfsdk:"default_database"` + Host types.String `tfsdk:"host"` + Id types.Int64 `tfsdk:"id"` + InstanceId types.String `tfsdk:"instance_id"` + Port types.Int64 `tfsdk:"port"` + ProjectId types.String `tfsdk:"project_id"` + Region types.String `tfsdk:"region"` + Roles types.List `tfsdk:"roles"` + Status types.String `tfsdk:"status"` + UserId types.Int64 `tfsdk:"user_id"` + Username types.String `tfsdk:"username"` +} + type userDataSource struct { client *sqlserverflexbetaPkg.APIClient providerData core.ProviderData } -func (d *userDataSource) Metadata(_ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) { +func (d *userDataSource) Metadata( + _ context.Context, + req datasource.MetadataRequest, + resp *datasource.MetadataResponse, +) { resp.TypeName = req.ProviderTypeName + "_sqlserverflexbeta_user" } @@ -59,7 +78,7 @@ func (d *userDataSource) Configure( } func (d *userDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { - var data sqlserverflexbetaGen.UserModel + var data dataSourceModel // Read Terraform configuration data into the model resp.Diagnostics.Append(req.Config.Get(ctx, &data)...) @@ -72,13 +91,15 @@ func (d *userDataSource) Read(ctx context.Context, req datasource.ReadRequest, r projectId := data.ProjectId.ValueString() region := d.providerData.GetRegionWithOverride(data.Region) - userId := data.UserId.ValueString() + instanceId := data.InstanceId.ValueString() + userId := data.UserId.ValueInt64() ctx = tflog.SetField(ctx, "project_id", projectId) ctx = tflog.SetField(ctx, "region", region) + ctx = tflog.SetField(ctx, "instance_id", instanceId) ctx = tflog.SetField(ctx, "user_id", userId) - userResp, err := d.client.GetUserRequest(ctx, projectId, region, userId).Execute() + userResp, err := d.client.GetUserRequest(ctx, projectId, region, instanceId, userId).Execute() if err != nil { utils.LogError( ctx, diff --git a/stackit/internal/services/sqlserverflexbeta/user/datasources_gen/user_data_source_gen.go b/stackit/internal/services/sqlserverflexbeta/user/datasources_gen/user_data_source_gen.go index 1950c24e..34aef9ca 100644 --- a/stackit/internal/services/sqlserverflexbeta/user/datasources_gen/user_data_source_gen.go +++ b/stackit/internal/services/sqlserverflexbeta/user/datasources_gen/user_data_source_gen.go @@ -98,7 +98,7 @@ func UserDataSourceSchema(ctx context.Context) schema.Schema { "users": schema.ListNestedAttribute{ NestedObject: schema.NestedAttributeObject{ Attributes: map[string]schema.Attribute{ - "id": schema.Int64Attribute{ + "tf_original_api_id": schema.Int64Attribute{ Computed: true, Description: "The ID of the user.", MarkdownDescription: "The ID of the user.", diff --git a/stackit/internal/services/sqlserverflexbeta/user/functions.go b/stackit/internal/services/sqlserverflexbeta/user/functions.go index b565f761..83ce641f 100644 --- a/stackit/internal/services/sqlserverflexbeta/user/functions.go +++ b/stackit/internal/services/sqlserverflexbeta/user/functions.go @@ -3,13 +3,9 @@ package sqlserverflexbeta import ( "context" "fmt" - "math" - "github.com/hashicorp/terraform-plugin-framework/attr" "github.com/hashicorp/terraform-plugin-framework/diag" - "github.com/hashicorp/terraform-plugin-framework/resource" "github.com/hashicorp/terraform-plugin-framework/types" - "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" sqlserverflexbeta "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta" sqlserverflexbetaResGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/instance/resources_gen" @@ -18,11 +14,39 @@ import ( func mapResponseToModel( ctx context.Context, resp *sqlserverflexbeta.GetUserResponse, - m *sqlserverflexbetaResGen.UserModel, + m *dataSourceModel, tfDiags diag.Diagnostics, ) error { + if resp == nil { + return fmt.Errorf("response is nil") + } + + m.Id = types.Int64Value(resp.GetId()) + m.UserId = types.Int64Value(resp.GetId()) + m.Username = types.StringValue(resp.GetUsername()) + m.Port = types.Int64Value(resp.GetPort()) + m.Host = types.StringValue(resp.GetHost()) + m.DefaultDatabase = types.StringValue(resp.GetDefaultDatabase()) + m.Status = types.StringValue(resp.GetStatus()) + + if resp.Roles != nil { + roles, diags := types.ListValueFrom(ctx, types.StringType, *resp.Roles) + tfDiags.Append(diags...) + if tfDiags.HasError() { + return fmt.Errorf("failed to map roles") + } + m.Roles = roles + } else { + m.Roles = types.ListNull(types.StringType) + } + + if resp.Status != nil { + m.Status = types.StringValue(*resp.Status) + } else { + m.Status = types.StringNull() + } + // TODO: complete and refactor - m.Id = types.StringValue(resp.GetId()) /* sampleList, diags := types.ListValueFrom(ctx, types.StringType, resp.GetList()) @@ -51,48 +75,63 @@ func mapResponseToModel( return nil } +// TODO: handle encryption field mapping when API supports it func handleEncryption( - m *sqlserverflexbetaResGen.UserModel, + m *dataSourceModel, resp *sqlserverflexbeta.GetUserResponse, ) sqlserverflexbetaResGen.EncryptionValue { - if !resp.HasEncryption() || - resp.Encryption == nil || - resp.Encryption.KekKeyId == nil || - resp.Encryption.KekKeyRingId == nil || - resp.Encryption.KekKeyVersion == nil || - resp.Encryption.ServiceAccount == nil { + /* + if !resp.HasEncryption() || - if m.Encryption.IsNull() || m.Encryption.IsUnknown() { - return sqlserverflexbetaResGen.NewEncryptionValueNull() + resp.Encryption == nil || + resp.Encryption.KekKeyId == nil || + resp.Encryption.KekKeyRingId == nil || + resp.Encryption.KekKeyVersion == nil || + resp.Encryption.ServiceAccount == nil { + + if m.Encryption.IsNull() || m.Encryption.IsUnknown() { + return sqlserverflexbetaResGen.NewEncryptionValueNull() + } + return m.Encryption } - return m.Encryption - } - enc := sqlserverflexbetaResGen.NewEncryptionValueNull() - if kVal, ok := resp.Encryption.GetKekKeyIdOk(); ok { - enc.KekKeyId = types.StringValue(kVal) - } - if kkVal, ok := resp.Encryption.GetKekKeyRingIdOk(); ok { - enc.KekKeyRingId = types.StringValue(kkVal) - } - if kkvVal, ok := resp.Encryption.GetKekKeyVersionOk(); ok { - enc.KekKeyVersion = types.StringValue(kkvVal) - } - if sa, ok := resp.Encryption.GetServiceAccountOk(); ok { - enc.ServiceAccount = types.StringValue(sa) - } - return enc + enc := sqlserverflexbetaResGen.NewEncryptionValueNull() + if kVal, ok := resp.Encryption.GetKekKeyIdOk(); ok { + enc.KekKeyId = types.StringValue(kVal) + } + if kkVal, ok := resp.Encryption.GetKekKeyRingIdOk(); ok { + enc.KekKeyRingId = types.StringValue(kkVal) + } + if kkvVal, ok := resp.Encryption.GetKekKeyVersionOk(); ok { + enc.KekKeyVersion = types.StringValue(kkvVal) + } + if sa, ok := resp.Encryption.GetServiceAccountOk(); ok { + enc.ServiceAccount = types.StringValue(sa) + } + return enc + */ + return sqlserverflexbetaResGen.NewEncryptionValueNull() } func toCreatePayload( ctx context.Context, - model *sqlserverflexbetaResGen.UserModel, + model *dataSourceModel, ) (*sqlserverflexbeta.CreateUserRequestPayload, error) { if model == nil { return nil, fmt.Errorf("nil model") } + var roles []sqlserverflexbeta.UserRole + if !model.Roles.IsNull() && !model.Roles.IsUnknown() { + diags := model.Roles.ElementsAs(ctx, &roles, false) + if diags.HasError() { + return nil, fmt.Errorf("failed to convert roles: %v", diags) + } + } + return &sqlserverflexbeta.CreateUserRequestPayload{ - // TODO: fill fields + DefaultDatabase: model.DefaultDatabase.ValueStringPointer(), + Username: model.Username.ValueStringPointer(), + Roles: &roles, }, nil } diff --git a/stackit/internal/services/sqlserverflexbeta/user/planModifiers.yaml b/stackit/internal/services/sqlserverflexbeta/user/planModifiers.yaml new file mode 100644 index 00000000..fe4025ee --- /dev/null +++ b/stackit/internal/services/sqlserverflexbeta/user/planModifiers.yaml @@ -0,0 +1,51 @@ +fields: + - name: 'id' + modifiers: + - 'UseStateForUnknown' + + - name: 'user_id' + modifiers: + - 'UseStateForUnknown' + + - name: 'instance_id' + validators: + - validate.NoSeparator + - validate.UUID + modifiers: + - 'UseStateForUnknown' + + - name: 'project_id' + validators: + - validate.NoSeparator + - validate.UUID + modifiers: + - 'UseStateForUnknown' + - 'RequiresReplace' + + - name: 'region' + modifiers: + - 'RequiresReplace' + + - name: 'user_id' + modifiers: + - 'RequiresReplace' + + - name: 'username' + modifiers: + - 'UseStateForUnknown' + + - name: 'roles' + modifiers: + - 'UseStateForUnknown' + + - name: 'password' + modifiers: + - 'UseStateForUnknown' + + - name: 'uri' + modifiers: + - 'UseStateForUnknown' + + - name: 'status' + modifiers: + - 'UseStateForUnknown' diff --git a/stackit/internal/services/sqlserverflexbeta/user/resource.go b/stackit/internal/services/sqlserverflexbeta/user/resource.go index e2692e13..40f78c7f 100644 --- a/stackit/internal/services/sqlserverflexbeta/user/resource.go +++ b/stackit/internal/services/sqlserverflexbeta/user/resource.go @@ -2,7 +2,9 @@ package sqlserverflexbeta import ( "context" + _ "embed" "fmt" + "strconv" "strings" "github.com/hashicorp/terraform-plugin-framework/path" @@ -11,9 +13,8 @@ import ( "github.com/hashicorp/terraform-plugin-framework/types" "github.com/hashicorp/terraform-plugin-log/tflog" "github.com/stackitcloud/stackit-sdk-go/core/config" - "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" - "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" @@ -33,27 +34,52 @@ func NewUserResource() resource.Resource { return &userResource{} } +// resourceModel describes the resource data model. +type resourceModel = sqlserverflexbetaResGen.UserModel + +// UserResourceIdentityModel describes the resource's identity attributes. +type UserResourceIdentityModel struct { + ProjectID types.String `tfsdk:"project_id"` + Region types.String `tfsdk:"region"` + InstanceID types.String `tfsdk:"instance_id"` + UserID types.Int64 `tfsdk:"database_id"` +} + type userResource struct { client *sqlserverflexbeta.APIClient providerData core.ProviderData } -type UserResourceIdentityModel struct { - ProjectID types.String `tfsdk:"project_id"` - Region types.String `tfsdk:"region"` - UserID types.String `tfsdk:"instance_id"` - // TODO: implement further needed parts -} - func (r *userResource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) { resp.TypeName = req.ProviderTypeName + "_sqlserverflexbeta_user" } -func (r *userResource) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) { - resp.Schema = sqlserverflexbetaResGen.UserResourceSchema(ctx) +//go:embed planModifiers.yaml +var modifiersFileByte []byte + +func (r *userResource) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) { + + s := sqlserverflexbetaResGen.UserResourceSchema(ctx) + + fields, err := utils.ReadModifiersConfig(modifiersFileByte) + if err != nil { + resp.Diagnostics.AddError("error during read modifiers config file", err.Error()) + return + } + + err = utils.AddPlanModifiersToResourceSchema(fields, &s) + if err != nil { + resp.Diagnostics.AddError("error adding plan modifiers", err.Error()) + return + } + resp.Schema = s } -func (r *instanceResource) IdentitySchema(_ context.Context, _ resource.IdentitySchemaRequest, resp *resource.IdentitySchemaResponse) { +func (r *userResource) IdentitySchema( + _ context.Context, + _ resource.IdentitySchemaRequest, + resp *resource.IdentitySchemaResponse, +) { resp.IdentitySchema = identityschema.Schema{ Attributes: map[string]identityschema.Attribute{ "project_id": identityschema.StringAttribute{ @@ -85,8 +111,11 @@ func (r *userResource) Configure( config.WithCustomAuth(r.providerData.RoundTripper), utils.UserAgentConfigOption(r.providerData.Version), } - if r.providerData.SqlserverflexbetaCustomEndpoint != "" { - apiClientConfigOptions = append(apiClientConfigOptions, config.WithEndpoint(r.providerData.sqlserverflexbetaCustomEndpoint)) + if r.providerData.SQLServerFlexCustomEndpoint != "" { + apiClientConfigOptions = append( + apiClientConfigOptions, + config.WithEndpoint(r.providerData.SQLServerFlexCustomEndpoint), + ) } else { apiClientConfigOptions = append(apiClientConfigOptions, config.WithRegion(r.providerData.GetRegion())) } @@ -106,7 +135,7 @@ func (r *userResource) Configure( } func (r *userResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) { - var data sqlserverflexbetaResGen.UserModel + var data resourceModel // Read Terraform plan data into the model resp.Diagnostics.Append(req.Plan.Get(ctx, &data)...) @@ -159,14 +188,14 @@ func (r *userResource) Create(ctx context.Context, req resource.CreateRequest, r */ // Example data value setting - data.UserId = types.StringValue("id-from-response") + //data.UserId = types.StringValue("id-from-response") // TODO: Set data returned by API in identity identity := UserResourceIdentityModel{ ProjectID: types.StringValue(projectId), Region: types.StringValue(region), // TODO: add missing values - UserID: types.StringValue(UserId), + // UserID: types.StringValue(UserId), } resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...) if resp.Diagnostics.HasError() { @@ -228,7 +257,7 @@ func (r *userResource) Create(ctx context.Context, req resource.CreateRequest, r } func (r *userResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) { - var data sqlserverflexbetaResGen.UserModel + var data resourceModel // Read Terraform prior state data into the model resp.Diagnostics.Append(req.State.Get(ctx, &data)...) @@ -270,7 +299,7 @@ func (r *userResource) Read(ctx context.Context, req resource.ReadRequest, resp } func (r *userResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) { - var data sqlserverflexbetaResGen.UserModel + var data resourceModel // Read Terraform prior state data into the model resp.Diagnostics.Append(req.State.Get(ctx, &data)...) @@ -301,7 +330,7 @@ func (r *userResource) Update(ctx context.Context, req resource.UpdateRequest, r } func (r *userResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) { - var data sqlserverflexbetaResGen.UserModel + var data resourceModel // Read Terraform prior state data into the model resp.Diagnostics.Append(req.State.Get(ctx, &data)...) @@ -335,7 +364,7 @@ func (r *userResource) ModifyPlan( req resource.ModifyPlanRequest, resp *resource.ModifyPlanResponse, ) { // nolint:gocritic // function signature required by Terraform - var configModel sqlserverflexbetaResGen.UserModel + var configModel resourceModel // skip initial empty configuration to avoid follow-up errors if req.Config.Raw.IsNull() { return @@ -345,7 +374,7 @@ func (r *userResource) ModifyPlan( return } - var planModel sqlserverflexbetaResGen.UserModel + var planModel resourceModel resp.Diagnostics.Append(req.Plan.Get(ctx, &planModel)...) if resp.Diagnostics.HasError() { return @@ -382,24 +411,61 @@ func (r *userResource) ImportState( req resource.ImportStateRequest, resp *resource.ImportStateResponse, ) { - idParts := strings.Split(req.ID, core.Separator) + ctx = core.InitProviderContext(ctx) + + if req.ID != "" { + + idParts := strings.Split(req.ID, core.Separator) + + if len(idParts) != 4 || idParts[0] == "" || idParts[1] == "" || idParts[2] == "" || idParts[3] == "" { + core.LogAndAddError( + ctx, &resp.Diagnostics, + "Error importing user", + fmt.Sprintf( + "Expected import identifier with format [project_id],[region],[instance_id],[user_id], got %q", + req.ID, + ), + ) + return + } + + userId, err := strconv.ParseInt(idParts[3], 10, 64) + if err != nil { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + "Error importing user", + fmt.Sprintf("Invalid user_id format: %q. It must be a valid integer.", idParts[3]), + ) + return + } + + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), idParts[0])...) + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), idParts[1])...) + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), idParts[2])...) + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("user_id"), userId)...) + + tflog.Info(ctx, "Sqlserverflexbeta user state imported") - // Todo: Import logic - if len(idParts) < 2 || idParts[0] == "" || idParts[1] == "" { - core.LogAndAddError( - ctx, &resp.Diagnostics, - "Error importing database", - fmt.Sprintf( - "Expected import identifier with format [project_id],[region],..., got %q", - req.ID, - ), - ) return } - resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), idParts[0])...) - resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), idParts[1])...) - // ... more ... + // If no ID is provided, attempt to read identity attributes from the import configuration + var identityData UserResourceIdentityModel + resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...) + if resp.Diagnostics.HasError() { + return + } + + projectId := identityData.ProjectID.ValueString() + region := identityData.Region.ValueString() + instanceId := identityData.InstanceID.ValueString() + userId := identityData.UserID.ValueInt64() + + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), projectId)...) + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), region)...) + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), instanceId)...) + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("user_id"), userId)...) core.LogAndAddWarning( ctx, @@ -408,4 +474,5 @@ func (r *userResource) ImportState( "The database password is not imported as it is only available upon creation of a new database. The password field will be empty.", ) tflog.Info(ctx, "Sqlserverflexbeta user state imported") + } diff --git a/stackit/internal/services/sqlserverflexbeta/versions/datasources_gen/version_data_source_gen.go b/stackit/internal/services/sqlserverflexbeta/versions/datasources_gen/version_data_source_gen.go new file mode 100644 index 00000000..239b44d3 --- /dev/null +++ b/stackit/internal/services/sqlserverflexbeta/versions/datasources_gen/version_data_source_gen.go @@ -0,0 +1,569 @@ +// Code generated by terraform-plugin-framework-generator DO NOT EDIT. + +package sqlserverflexbeta + +import ( + "context" + "fmt" + "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator" + "github.com/hashicorp/terraform-plugin-framework/attr" + "github.com/hashicorp/terraform-plugin-framework/diag" + "github.com/hashicorp/terraform-plugin-framework/schema/validator" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/hashicorp/terraform-plugin-framework/types/basetypes" + "github.com/hashicorp/terraform-plugin-go/tftypes" + "strings" + + "github.com/hashicorp/terraform-plugin-framework/datasource/schema" +) + +func VersionDataSourceSchema(ctx context.Context) schema.Schema { + return schema.Schema{ + Attributes: map[string]schema.Attribute{ + "project_id": schema.StringAttribute{ + Required: true, + Description: "The STACKIT project ID.", + MarkdownDescription: "The STACKIT project ID.", + }, + "region": schema.StringAttribute{ + Required: true, + Description: "The region which should be addressed", + MarkdownDescription: "The region which should be addressed", + Validators: []validator.String{ + stringvalidator.OneOf( + "eu01", + ), + }, + }, + "versions": schema.ListNestedAttribute{ + NestedObject: schema.NestedAttributeObject{ + Attributes: map[string]schema.Attribute{ + "beta": schema.BoolAttribute{ + Computed: true, + Description: "Flag if the version is a beta version. If set the version may contain bugs and is not fully tested.", + MarkdownDescription: "Flag if the version is a beta version. If set the version may contain bugs and is not fully tested.", + }, + "deprecated": schema.StringAttribute{ + Computed: true, + Description: "Timestamp in RFC3339 format which says when the version will no longer be supported by STACKIT.", + MarkdownDescription: "Timestamp in RFC3339 format which says when the version will no longer be supported by STACKIT.", + }, + "recommend": schema.BoolAttribute{ + Computed: true, + Description: "Flag if the version is recommend by the STACKIT Team.", + MarkdownDescription: "Flag if the version is recommend by the STACKIT Team.", + }, + "version": schema.StringAttribute{ + Computed: true, + Description: "The sqlserver version used for the instance.", + MarkdownDescription: "The sqlserver version used for the instance.", + }, + }, + CustomType: VersionsType{ + ObjectType: types.ObjectType{ + AttrTypes: VersionsValue{}.AttributeTypes(ctx), + }, + }, + }, + Computed: true, + Description: "A list containing available sqlserver versions.", + MarkdownDescription: "A list containing available sqlserver versions.", + }, + }, + } +} + +type VersionModel struct { + ProjectId types.String `tfsdk:"project_id"` + Region types.String `tfsdk:"region"` + Versions types.List `tfsdk:"versions"` +} + +var _ basetypes.ObjectTypable = VersionsType{} + +type VersionsType struct { + basetypes.ObjectType +} + +func (t VersionsType) Equal(o attr.Type) bool { + other, ok := o.(VersionsType) + + if !ok { + return false + } + + return t.ObjectType.Equal(other.ObjectType) +} + +func (t VersionsType) String() string { + return "VersionsType" +} + +func (t VersionsType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) { + var diags diag.Diagnostics + + attributes := in.Attributes() + + betaAttribute, ok := attributes["beta"] + + if !ok { + diags.AddError( + "Attribute Missing", + `beta is missing from object`) + + return nil, diags + } + + betaVal, ok := betaAttribute.(basetypes.BoolValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`beta expected to be basetypes.BoolValue, was: %T`, betaAttribute)) + } + + deprecatedAttribute, ok := attributes["deprecated"] + + if !ok { + diags.AddError( + "Attribute Missing", + `deprecated is missing from object`) + + return nil, diags + } + + deprecatedVal, ok := deprecatedAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`deprecated expected to be basetypes.StringValue, was: %T`, deprecatedAttribute)) + } + + recommendAttribute, ok := attributes["recommend"] + + if !ok { + diags.AddError( + "Attribute Missing", + `recommend is missing from object`) + + return nil, diags + } + + recommendVal, ok := recommendAttribute.(basetypes.BoolValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`recommend expected to be basetypes.BoolValue, was: %T`, recommendAttribute)) + } + + versionAttribute, ok := attributes["version"] + + if !ok { + diags.AddError( + "Attribute Missing", + `version is missing from object`) + + return nil, diags + } + + versionVal, ok := versionAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`version expected to be basetypes.StringValue, was: %T`, versionAttribute)) + } + + if diags.HasError() { + return nil, diags + } + + return VersionsValue{ + Beta: betaVal, + Deprecated: deprecatedVal, + Recommend: recommendVal, + Version: versionVal, + state: attr.ValueStateKnown, + }, diags +} + +func NewVersionsValueNull() VersionsValue { + return VersionsValue{ + state: attr.ValueStateNull, + } +} + +func NewVersionsValueUnknown() VersionsValue { + return VersionsValue{ + state: attr.ValueStateUnknown, + } +} + +func NewVersionsValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (VersionsValue, diag.Diagnostics) { + var diags diag.Diagnostics + + // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521 + ctx := context.Background() + + for name, attributeType := range attributeTypes { + attribute, ok := attributes[name] + + if !ok { + diags.AddError( + "Missing VersionsValue Attribute Value", + "While creating a VersionsValue value, a missing attribute value was detected. "+ + "A VersionsValue must contain values for all attributes, even if null or unknown. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("VersionsValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()), + ) + + continue + } + + if !attributeType.Equal(attribute.Type(ctx)) { + diags.AddError( + "Invalid VersionsValue Attribute Type", + "While creating a VersionsValue value, an invalid attribute value was detected. "+ + "A VersionsValue must use a matching attribute type for the value. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("VersionsValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+ + fmt.Sprintf("VersionsValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)), + ) + } + } + + for name := range attributes { + _, ok := attributeTypes[name] + + if !ok { + diags.AddError( + "Extra VersionsValue Attribute Value", + "While creating a VersionsValue value, an extra attribute value was detected. "+ + "A VersionsValue must not contain values beyond the expected attribute types. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("Extra VersionsValue Attribute Name: %s", name), + ) + } + } + + if diags.HasError() { + return NewVersionsValueUnknown(), diags + } + + betaAttribute, ok := attributes["beta"] + + if !ok { + diags.AddError( + "Attribute Missing", + `beta is missing from object`) + + return NewVersionsValueUnknown(), diags + } + + betaVal, ok := betaAttribute.(basetypes.BoolValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`beta expected to be basetypes.BoolValue, was: %T`, betaAttribute)) + } + + deprecatedAttribute, ok := attributes["deprecated"] + + if !ok { + diags.AddError( + "Attribute Missing", + `deprecated is missing from object`) + + return NewVersionsValueUnknown(), diags + } + + deprecatedVal, ok := deprecatedAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`deprecated expected to be basetypes.StringValue, was: %T`, deprecatedAttribute)) + } + + recommendAttribute, ok := attributes["recommend"] + + if !ok { + diags.AddError( + "Attribute Missing", + `recommend is missing from object`) + + return NewVersionsValueUnknown(), diags + } + + recommendVal, ok := recommendAttribute.(basetypes.BoolValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`recommend expected to be basetypes.BoolValue, was: %T`, recommendAttribute)) + } + + versionAttribute, ok := attributes["version"] + + if !ok { + diags.AddError( + "Attribute Missing", + `version is missing from object`) + + return NewVersionsValueUnknown(), diags + } + + versionVal, ok := versionAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`version expected to be basetypes.StringValue, was: %T`, versionAttribute)) + } + + if diags.HasError() { + return NewVersionsValueUnknown(), diags + } + + return VersionsValue{ + Beta: betaVal, + Deprecated: deprecatedVal, + Recommend: recommendVal, + Version: versionVal, + state: attr.ValueStateKnown, + }, diags +} + +func NewVersionsValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) VersionsValue { + object, diags := NewVersionsValue(attributeTypes, attributes) + + if diags.HasError() { + // This could potentially be added to the diag package. + diagsStrings := make([]string, 0, len(diags)) + + for _, diagnostic := range diags { + diagsStrings = append(diagsStrings, fmt.Sprintf( + "%s | %s | %s", + diagnostic.Severity(), + diagnostic.Summary(), + diagnostic.Detail())) + } + + panic("NewVersionsValueMust received error(s): " + strings.Join(diagsStrings, "\n")) + } + + return object +} + +func (t VersionsType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) { + if in.Type() == nil { + return NewVersionsValueNull(), nil + } + + if !in.Type().Equal(t.TerraformType(ctx)) { + return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type()) + } + + if !in.IsKnown() { + return NewVersionsValueUnknown(), nil + } + + if in.IsNull() { + return NewVersionsValueNull(), nil + } + + attributes := map[string]attr.Value{} + + val := map[string]tftypes.Value{} + + err := in.As(&val) + + if err != nil { + return nil, err + } + + for k, v := range val { + a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v) + + if err != nil { + return nil, err + } + + attributes[k] = a + } + + return NewVersionsValueMust(VersionsValue{}.AttributeTypes(ctx), attributes), nil +} + +func (t VersionsType) ValueType(ctx context.Context) attr.Value { + return VersionsValue{} +} + +var _ basetypes.ObjectValuable = VersionsValue{} + +type VersionsValue struct { + Beta basetypes.BoolValue `tfsdk:"beta"` + Deprecated basetypes.StringValue `tfsdk:"deprecated"` + Recommend basetypes.BoolValue `tfsdk:"recommend"` + Version basetypes.StringValue `tfsdk:"version"` + state attr.ValueState +} + +func (v VersionsValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) { + attrTypes := make(map[string]tftypes.Type, 4) + + var val tftypes.Value + var err error + + attrTypes["beta"] = basetypes.BoolType{}.TerraformType(ctx) + attrTypes["deprecated"] = basetypes.StringType{}.TerraformType(ctx) + attrTypes["recommend"] = basetypes.BoolType{}.TerraformType(ctx) + attrTypes["version"] = basetypes.StringType{}.TerraformType(ctx) + + objectType := tftypes.Object{AttributeTypes: attrTypes} + + switch v.state { + case attr.ValueStateKnown: + vals := make(map[string]tftypes.Value, 4) + + val, err = v.Beta.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["beta"] = val + + val, err = v.Deprecated.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["deprecated"] = val + + val, err = v.Recommend.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["recommend"] = val + + val, err = v.Version.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["version"] = val + + if err := tftypes.ValidateValue(objectType, vals); err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + return tftypes.NewValue(objectType, vals), nil + case attr.ValueStateNull: + return tftypes.NewValue(objectType, nil), nil + case attr.ValueStateUnknown: + return tftypes.NewValue(objectType, tftypes.UnknownValue), nil + default: + panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state)) + } +} + +func (v VersionsValue) IsNull() bool { + return v.state == attr.ValueStateNull +} + +func (v VersionsValue) IsUnknown() bool { + return v.state == attr.ValueStateUnknown +} + +func (v VersionsValue) String() string { + return "VersionsValue" +} + +func (v VersionsValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) { + var diags diag.Diagnostics + + attributeTypes := map[string]attr.Type{ + "beta": basetypes.BoolType{}, + "deprecated": basetypes.StringType{}, + "recommend": basetypes.BoolType{}, + "version": basetypes.StringType{}, + } + + if v.IsNull() { + return types.ObjectNull(attributeTypes), diags + } + + if v.IsUnknown() { + return types.ObjectUnknown(attributeTypes), diags + } + + objVal, diags := types.ObjectValue( + attributeTypes, + map[string]attr.Value{ + "beta": v.Beta, + "deprecated": v.Deprecated, + "recommend": v.Recommend, + "version": v.Version, + }) + + return objVal, diags +} + +func (v VersionsValue) Equal(o attr.Value) bool { + other, ok := o.(VersionsValue) + + if !ok { + return false + } + + if v.state != other.state { + return false + } + + if v.state != attr.ValueStateKnown { + return true + } + + if !v.Beta.Equal(other.Beta) { + return false + } + + if !v.Deprecated.Equal(other.Deprecated) { + return false + } + + if !v.Recommend.Equal(other.Recommend) { + return false + } + + if !v.Version.Equal(other.Version) { + return false + } + + return true +} + +func (v VersionsValue) Type(ctx context.Context) attr.Type { + return VersionsType{ + basetypes.ObjectType{ + AttrTypes: v.AttributeTypes(ctx), + }, + } +} + +func (v VersionsValue) AttributeTypes(ctx context.Context) map[string]attr.Type { + return map[string]attr.Type{ + "beta": basetypes.BoolType{}, + "deprecated": basetypes.StringType{}, + "recommend": basetypes.BoolType{}, + "version": basetypes.StringType{}, + } +} diff --git a/stackit/internal/services/postgresflexalpha/utils/planModifiers.go b/stackit/internal/utils/planModifiers.go similarity index 100% rename from stackit/internal/services/postgresflexalpha/utils/planModifiers.go rename to stackit/internal/utils/planModifiers.go diff --git a/stackit/internal/utils/planModifiers_test.go b/stackit/internal/utils/planModifiers_test.go new file mode 100644 index 00000000..337ea36f --- /dev/null +++ b/stackit/internal/utils/planModifiers_test.go @@ -0,0 +1,224 @@ +package utils + +import ( + "testing" + + "github.com/google/go-cmp/cmp" + "github.com/hashicorp/terraform-plugin-framework/resource/schema" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/stackitcloud/stackit-sdk-go/core/utils" +) + +func TestReadModifiersConfig(t *testing.T) { + testcases := []struct { + name string + content []byte + wantErr bool + }{ + { + name: "valid yaml", + content: []byte(` +fields: + - name: 'id' + modifiers: + - 'UseStateForUnknown' +`), + wantErr: false, + }, + { + name: "invalid yaml", + content: []byte(`invalid: yaml: :`), + wantErr: true, + }, + } + + for _, tc := range testcases { + t.Run( + tc.name, func(t *testing.T) { + _, err := ReadModifiersConfig(tc.content) + if (err != nil) != tc.wantErr { + t.Errorf("ReadModifiersConfig() error = %v, wantErr %v", err, tc.wantErr) + } + }, + ) + } +} + +func TestAddPlanModifiersToResourceSchema(t *testing.T) { + testcases := []struct { + name string + fields *Fields + sch *schema.Schema + wantErr bool + }{ + { + name: "full coverage - all types and nested structures", + fields: &Fields{ + Fields: []*Field{ + { + Name: "string_attr", + Modifiers: []*string{utils.Ptr("RequiresReplace"), utils.Ptr("UseStateForUnknown")}, + }, + {Name: "bool_attr", Modifiers: []*string{utils.Ptr("RequiresReplace")}}, + {Name: "int_attr", Modifiers: []*string{utils.Ptr("UseStateForUnknown")}}, + {Name: "list_attr", Modifiers: []*string{utils.Ptr("RequiresReplace")}}, + {Name: "Nested.sub_string", Modifiers: []*string{utils.Ptr("RequiresReplace")}}, + }, + }, + sch: &schema.Schema{ + Attributes: map[string]schema.Attribute{ + "StringAttr": schema.StringAttribute{}, + "BoolAttr": schema.BoolAttribute{}, + "IntAttr": schema.Int64Attribute{}, + "ListAttr": schema.ListAttribute{}, + "Nested": schema.SingleNestedAttribute{ + Attributes: map[string]schema.Attribute{ + "SubString": schema.StringAttribute{}, + }, + }, + "Unsupported": schema.MapAttribute{ElementType: types.StringType}, // Triggers default/warn case + }, + }, + wantErr: false, + }, + { + name: "validation error - invalid modifier", + fields: &Fields{ + Fields: []*Field{ + {Name: "id", Modifiers: []*string{utils.Ptr("InvalidModifier")}}, + }, + }, + sch: &schema.Schema{ + Attributes: map[string]schema.Attribute{"id": schema.StringAttribute{}}, + }, + wantErr: true, + }, + { + name: "validation error - empty modifier", + fields: &Fields{ + Fields: []*Field{ + {Name: "id", Modifiers: []*string{utils.Ptr("")}}, + }, + }, + sch: &schema.Schema{}, + wantErr: true, + }, + { + name: "nil fields - should return nil", + fields: nil, + sch: &schema.Schema{}, + wantErr: false, + }, + } + + for _, tc := range testcases { + t.Run( + tc.name, func(t *testing.T) { + err := AddPlanModifiersToResourceSchema(tc.fields, tc.sch) + + if (err != nil) != tc.wantErr { + t.Fatalf("AddPlanModifiersToResourceSchema() error = %v, wantErr %v", err, tc.wantErr) + } + + if !tc.wantErr && tc.name == "full coverage - all types and nested structures" { + // Check StringAttr + if sAttr, ok := tc.sch.Attributes["StringAttr"].(schema.StringAttribute); ok { + if len(sAttr.PlanModifiers) != 2 { + t.Errorf("StringAttr: expected 2 modifiers, got %d", len(sAttr.PlanModifiers)) + } + } + + // Check Nested Sub-Attribute + if nested, ok := tc.sch.Attributes["Nested"].(schema.SingleNestedAttribute); ok { + if subAttr, ok := nested.Attributes["SubString"].(schema.StringAttribute); ok { + if len(subAttr.PlanModifiers) != 1 { + // Dies schlug vorher fehl, weil der Prefix "Nested" statt "nested" war + t.Errorf("Nested SubString: expected 1 modifier, got %d", len(subAttr.PlanModifiers)) + } + } else { + t.Error("SubString attribute not found in Nested") + } + } else { + t.Error("Nested attribute not found") + } + } + }, + ) + } +} + +func TestFieldListToMap(t *testing.T) { + testcases := []struct { + name string + fields *Fields + want map[string][]*string + }{ + { + name: "convert list to map", + fields: &Fields{ + Fields: []*Field{ + {Name: "test", Modifiers: []*string{utils.Ptr("mod")}}, + }, + }, + want: map[string][]*string{ + "test": {utils.Ptr("mod")}, + }, + }, + { + name: "nil fields", + fields: nil, + want: map[string][]*string{}, + }, + } + + for _, tc := range testcases { + t.Run( + tc.name, func(t *testing.T) { + got := fieldListToMap(tc.fields) + if diff := cmp.Diff(tc.want, got); diff != "" { + t.Errorf("fieldListToMap() mismatch (-want +got):\n%s", diff) + } + }, + ) + } +} + +func TestHandleTypeMismatches(t *testing.T) { + modifiers := []*string{utils.Ptr("RequiresReplace")} + + t.Run( + "bool type mismatch", func(t *testing.T) { + _, err := handleBoolPlanModifiers(schema.StringAttribute{}, modifiers) + if err == nil { + t.Error("expected error for type mismatch in handleBoolPlanModifiers") + } + }, + ) + + t.Run( + "string type mismatch", func(t *testing.T) { + _, err := handleStringPlanModifiers(schema.BoolAttribute{}, modifiers) + if err == nil { + t.Error("expected error for type mismatch in handleStringPlanModifiers") + } + }, + ) + + t.Run( + "int64 type mismatch", func(t *testing.T) { + _, err := handleInt64PlanModifiers(schema.StringAttribute{}, modifiers) + if err == nil { + t.Error("expected error for type mismatch in handleInt64PlanModifiers") + } + }, + ) + + t.Run( + "list type mismatch", func(t *testing.T) { + _, err := handleListPlanModifiers(schema.StringAttribute{}, modifiers) + if err == nil { + t.Error("expected error for type mismatch in handleListPlanModifiers") + } + }, + ) +} diff --git a/stackit/internal/wait/sqlserverflexbeta/wait_test.go b/stackit/internal/wait/sqlserverflexbeta/wait_test.go index e1ccc9c5..35b66cf6 100644 --- a/stackit/internal/wait/sqlserverflexbeta/wait_test.go +++ b/stackit/internal/wait/sqlserverflexbeta/wait_test.go @@ -20,7 +20,30 @@ type apiClientInstanceMocked struct { instanceGetFails bool } -func (a *apiClientInstanceMocked) GetInstanceRequestExecute(_ context.Context, _, _, _ string) (*sqlserverflex.GetInstanceResponse, error) { +func (a *apiClientInstanceMocked) GetDatabaseRequestExecute( + _ context.Context, + projectId string, + region string, + instanceId string, + databaseName string, +) (*sqlserverflex.GetDatabaseResponse, error) { + return nil, nil +} + +func (a *apiClientInstanceMocked) GetUserRequestExecute( + ctx context.Context, + projectId string, + region string, + instanceId string, + userId int64, +) (*sqlserverflex.GetUserResponse, error) { + return nil, nil +} + +func (a *apiClientInstanceMocked) GetInstanceRequestExecute( + _ context.Context, + _, _, _ string, +) (*sqlserverflex.GetInstanceResponse, error) { if a.instanceGetFails { return nil, &oapierror.GenericOpenAPIError{ StatusCode: 500, @@ -111,26 +134,28 @@ func TestCreateInstanceWaitHandler(t *testing.T) { }, } for _, tt := range tests { - t.Run(tt.desc, func(t *testing.T) { - instanceId := "foo-bar" + t.Run( + tt.desc, func(t *testing.T) { + instanceId := "foo-bar" - apiClient := &apiClientInstanceMocked{ - instanceId: instanceId, - instanceState: tt.instanceState, - instanceGetFails: tt.instanceGetFails, - } + apiClient := &apiClientInstanceMocked{ + instanceId: instanceId, + instanceState: tt.instanceState, + instanceGetFails: tt.instanceGetFails, + } - handler := CreateInstanceWaitHandler(context.Background(), apiClient, "", instanceId, "") + handler := CreateInstanceWaitHandler(context.Background(), apiClient, "", instanceId, "") - gotRes, err := handler.SetTimeout(10 * time.Millisecond).SetSleepBeforeWait(1 * time.Millisecond).WaitWithContext(context.Background()) - if (err != nil) != tt.wantErr { - t.Fatalf("handler error = %v, wantErr %v", err, tt.wantErr) - } + gotRes, err := handler.SetTimeout(10 * time.Millisecond).SetSleepBeforeWait(1 * time.Millisecond).WaitWithContext(context.Background()) + if (err != nil) != tt.wantErr { + t.Fatalf("handler error = %v, wantErr %v", err, tt.wantErr) + } - if !cmp.Equal(gotRes, tt.wantRes) { - t.Fatalf("handler gotRes = %v, want %v", gotRes, tt.wantRes) - } - }) + if !cmp.Equal(gotRes, tt.wantRes) { + t.Fatalf("handler gotRes = %v, want %v", gotRes, tt.wantRes) + } + }, + ) } } @@ -179,34 +204,36 @@ func TestUpdateInstanceWaitHandler(t *testing.T) { }, } for _, tt := range tests { - t.Run(tt.desc, func(t *testing.T) { - instanceId := "foo-bar" + t.Run( + tt.desc, func(t *testing.T) { + instanceId := "foo-bar" - apiClient := &apiClientInstanceMocked{ - instanceId: instanceId, - instanceState: tt.instanceState, - instanceGetFails: tt.instanceGetFails, - } - - var wantRes *sqlserverflex.GetInstanceResponse - if tt.wantResp { - wantRes = &sqlserverflex.GetInstanceResponse{ - Id: &instanceId, - Status: sqlserverflex.GetInstanceResponseGetStatusAttributeType(utils.Ptr(tt.instanceState)), + apiClient := &apiClientInstanceMocked{ + instanceId: instanceId, + instanceState: tt.instanceState, + instanceGetFails: tt.instanceGetFails, } - } - handler := UpdateInstanceWaitHandler(context.Background(), apiClient, "", instanceId, "") + var wantRes *sqlserverflex.GetInstanceResponse + if tt.wantResp { + wantRes = &sqlserverflex.GetInstanceResponse{ + Id: &instanceId, + Status: sqlserverflex.GetInstanceResponseGetStatusAttributeType(utils.Ptr(tt.instanceState)), + } + } - gotRes, err := handler.SetTimeout(10 * time.Millisecond).SetSleepBeforeWait(1 * time.Millisecond).WaitWithContext(context.Background()) + handler := UpdateInstanceWaitHandler(context.Background(), apiClient, "", instanceId, "") - if (err != nil) != tt.wantErr { - t.Fatalf("handler error = %v, wantErr %v", err, tt.wantErr) - } - if !cmp.Equal(gotRes, wantRes) { - t.Fatalf("handler gotRes = %v, want %v", gotRes, wantRes) - } - }) + gotRes, err := handler.SetTimeout(10 * time.Millisecond).SetSleepBeforeWait(1 * time.Millisecond).WaitWithContext(context.Background()) + + if (err != nil) != tt.wantErr { + t.Fatalf("handler error = %v, wantErr %v", err, tt.wantErr) + } + if !cmp.Equal(gotRes, wantRes) { + t.Fatalf("handler gotRes = %v, want %v", gotRes, wantRes) + } + }, + ) } } @@ -236,23 +263,25 @@ func TestDeleteInstanceWaitHandler(t *testing.T) { }, } for _, tt := range tests { - t.Run(tt.desc, func(t *testing.T) { - instanceId := "foo-bar" + t.Run( + tt.desc, func(t *testing.T) { + instanceId := "foo-bar" - apiClient := &apiClientInstanceMocked{ - instanceGetFails: tt.instanceGetFails, - instanceIsDeleted: tt.instanceState == InstanceStateSuccess, - instanceId: instanceId, - instanceState: tt.instanceState, - } + apiClient := &apiClientInstanceMocked{ + instanceGetFails: tt.instanceGetFails, + instanceIsDeleted: tt.instanceState == InstanceStateSuccess, + instanceId: instanceId, + instanceState: tt.instanceState, + } - handler := DeleteInstanceWaitHandler(context.Background(), apiClient, "", instanceId, "") + handler := DeleteInstanceWaitHandler(context.Background(), apiClient, "", instanceId, "") - _, err := handler.SetTimeout(10 * time.Millisecond).WaitWithContext(context.Background()) + _, err := handler.SetTimeout(10 * time.Millisecond).WaitWithContext(context.Background()) - if (err != nil) != tt.wantErr { - t.Fatalf("handler error = %v, wantErr %v", err, tt.wantErr) - } - }) + if (err != nil) != tt.wantErr { + t.Fatalf("handler error = %v, wantErr %v", err, tt.wantErr) + } + }, + ) } } From 6e23dab949286db03ee2ad2e95fe02dbc7b8e96d Mon Sep 17 00:00:00 2001 From: Marcel_Henselin Date: Tue, 10 Feb 2026 08:21:01 +0000 Subject: [PATCH 06/41] feat: create docs upload pipeline (#37) ## Description relates to #1234 ## Checklist - [ ] Issue was linked above - [ ] Code format was applied: `make fmt` - [ ] Examples were added / adjusted (see `examples/` directory) - [x] Docs are up-to-date: `make generate-docs` (will be checked by CI) - [ ] Unit tests got implemented or updated - [ ] Acceptance tests got implemented or updated (see e.g. [here](https://github.com/stackitcloud/terraform-provider-stackit/blob/f5f99d170996b208672ae684b6da53420e369563/stackit/internal/services/dns/dns_acc_test.go)) - [x] Unit tests are passing: `make test` (will be checked by CI) - [x] No linter issues: `make lint` (will be checked by CI) Reviewed-on: https://tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pulls/37 Reviewed-by: Andre_Harms --- .github/workflows/publish.yaml | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/.github/workflows/publish.yaml b/.github/workflows/publish.yaml index 2e01d519..b86dba35 100644 --- a/.github/workflows/publish.yaml +++ b/.github/workflows/publish.yaml @@ -130,3 +130,16 @@ jobs: cd release/ s3cmd put --recursive v1 s3://terraform-provider-privatepreview/ s3cmd put --recursive .well-known s3://terraform-provider-privatepreview/ + + - name: Import SSH key + run: | + mkdir -p ~/.ssh + echo "${{ secrets.DOCS_UPLOAD_SSH_KEY }}" > ~/.ssh/id_ed25519 + chmod 0600 ~/.ssh/id_ed25519 + + - name: Upload docs via scp + run: | + set -e + ssh -o StrictHostKeyChecking=no ubuntu@${{ vars.DOCS_SERVER_IP }} 'rm -rf /srv/www/docs' + echo "${{ github.ref_name }}" >docs/_version.txt + scp -o StrictHostKeyChecking=no -r docs ubuntu@${{ vars.DOCS_SERVER_IP }}:/srv/www/ From 68e4c137f166856e43f4eb88c15ac148b21d082f Mon Sep 17 00:00:00 2001 From: "Marcel S. Henselin" Date: Tue, 10 Feb 2026 09:32:06 +0000 Subject: [PATCH 07/41] fix: fix build pipeline error (#38) ## Description relates to #1234 ## Checklist - [ ] Issue was linked above - [ ] Code format was applied: `make fmt` - [ ] Examples were added / adjusted (see `examples/` directory) - [x] Docs are up-to-date: `make generate-docs` (will be checked by CI) - [ ] Unit tests got implemented or updated - [ ] Acceptance tests got implemented or updated (see e.g. [here](https://github.com/stackitcloud/terraform-provider-stackit/blob/f5f99d170996b208672ae684b6da53420e369563/stackit/internal/services/dns/dns_acc_test.go)) - [x] Unit tests are passing: `make test` (will be checked by CI) - [x] No linter issues: `make lint` (will be checked by CI) Reviewed-on: https://tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pulls/38 Co-authored-by: Marcel S. Henselin Co-committed-by: Marcel S. Henselin --- cmd/cmd/build/build.go | 7 ++++- .../postgresflexalpha_database.md | 22 +++++++------- docs/data-sources/postgresflexalpha_user.md | 22 +++++++------- .../sqlserverflexalpha_database.md | 1 + .../sqlserverflexalpha_instance.md | 1 + docs/resources/postgresflexalpha_database.md | 18 ++++++------ docs/resources/postgresflexalpha_user.md | 22 +++++++------- docs/resources/sqlserverflexalpha_user.md | 29 ++++++++++--------- .../postgresflexalpha/database/functions.go | 2 +- .../postgresflexalpha/flavor/functions.go | 2 +- 10 files changed, 64 insertions(+), 62 deletions(-) diff --git a/cmd/cmd/build/build.go b/cmd/cmd/build/build.go index 30fbf552..1226b876 100644 --- a/cmd/cmd/build/build.go +++ b/cmd/cmd/build/build.go @@ -678,7 +678,12 @@ func handleTfTagForDatasourceFile(filePath, service, resource string) error { } defer f.Close() - tmp, err := os.CreateTemp("", "replace-*") + root, err := getRoot() + if err != nil { + log.Fatal(err) + } + + tmp, err := os.CreateTemp(*root, "replace-*") if err != nil { return err } diff --git a/docs/data-sources/postgresflexalpha_database.md b/docs/data-sources/postgresflexalpha_database.md index 834d030c..95c115e3 100644 --- a/docs/data-sources/postgresflexalpha_database.md +++ b/docs/data-sources/postgresflexalpha_database.md @@ -3,12 +3,12 @@ page_title: "stackitprivatepreview_postgresflexalpha_database Data Source - stackitprivatepreview" subcategory: "" description: |- - Postgres Flex database resource schema. Must have a region specified in the provider configuration. + --- # stackitprivatepreview_postgresflexalpha_database (Data Source) -Postgres Flex database resource schema. Must have a `region` specified in the provider configuration. + ## Example Usage @@ -25,16 +25,14 @@ data "stackitprivatepreview_postgresflexalpha_database" "example" { ### Required -- `instance_id` (String) ID of the Postgres Flex instance. -- `project_id` (String) STACKIT project ID to which the instance is associated. - -### Optional - -- `database_id` (Number) Database ID. -- `name` (String) Database name. -- `region` (String) The resource region. If not defined, the provider region is used. +- `database_id` (Number) The ID of the database. +- `instance_id` (String) The ID of the instance. +- `project_id` (String) The STACKIT project ID. +- `region` (String) The region which should be addressed ### Read-Only -- `id` (String) Terraform's internal resource ID. It is structured as "`project_id`,`region`,`instance_id`,`database_id`". -- `owner` (String) Username of the database owner. +- `id` (String) Terraform's internal resource ID. It is structured as \"`project_id`,`region`,`instance_id`,`database_id`\".", +- `name` (String) The name of the database. +- `owner` (String) The owner of the database. +- `tf_original_api_id` (Number) The id of the database. diff --git a/docs/data-sources/postgresflexalpha_user.md b/docs/data-sources/postgresflexalpha_user.md index 1cda4f62..c3553c7b 100644 --- a/docs/data-sources/postgresflexalpha_user.md +++ b/docs/data-sources/postgresflexalpha_user.md @@ -3,12 +3,12 @@ page_title: "stackitprivatepreview_postgresflexalpha_user Data Source - stackitprivatepreview" subcategory: "" description: |- - Postgres Flex user data source schema. Must have a region specified in the provider configuration. + --- # stackitprivatepreview_postgresflexalpha_user (Data Source) -Postgres Flex user data source schema. Must have a `region` specified in the provider configuration. + ## Example Usage @@ -25,20 +25,18 @@ data "stackitprivatepreview_postgresflexalpha_user" "example" { ### Required -- `instance_id` (String) ID of the PostgresFlex instance. -- `project_id` (String) STACKIT project ID to which the instance is associated. -- `user_id` (String) User ID. +- `instance_id` (String) The ID of the instance. +- `project_id` (String) The STACKIT project ID. +- `region` (String) The region which should be addressed +- `user_id` (Number) The ID of the user. ### Optional -- `region` (String) The resource region. If not defined, the provider region is used. +- `id` (String) Terraform's internal resource ID. It is structured as \"`project_id`,`region`,`instance_id`,`user_id`\".", ### Read-Only -- `connection_string` (String) The connection string for the user to the instance. -- `host` (String) The host address for the user to connect to the instance. -- `id` (String) Terraform's internal data source. ID. It is structured as "`project_id`,`region`,`instance_id`,`user_id`". -- `port` (Number) The port number for the user to connect to the instance. -- `roles` (Set of String) The roles assigned to the user. +- `name` (String) The name of the user. +- `roles` (List of String) A list of user roles. - `status` (String) The current status of the user. -- `username` (String) The name of the user. +- `tf_original_api_id` (Number) The ID of the user. diff --git a/docs/data-sources/sqlserverflexalpha_database.md b/docs/data-sources/sqlserverflexalpha_database.md index 5db648f4..20e8a653 100644 --- a/docs/data-sources/sqlserverflexalpha_database.md +++ b/docs/data-sources/sqlserverflexalpha_database.md @@ -26,6 +26,7 @@ description: |- - `collation_name` (String) The collation of the database. This database collation should match the *collation_name* of one of the collations given by the **Get database collation list** endpoint. - `compatibility_level` (Number) CompatibilityLevel of the Database. +- `id` (String) Terraform's internal resource ID. It is structured as \"`project_id`,`region`,`instance_id`,`database_id`\".", - `name` (String) The name of the database. - `owner` (String) The owner of the database. - `tf_original_api_id` (Number) The id of the database. diff --git a/docs/data-sources/sqlserverflexalpha_instance.md b/docs/data-sources/sqlserverflexalpha_instance.md index b05d7b8e..f209c424 100644 --- a/docs/data-sources/sqlserverflexalpha_instance.md +++ b/docs/data-sources/sqlserverflexalpha_instance.md @@ -34,6 +34,7 @@ data "stackitprivatepreview_sqlserverflexalpha_instance" "example" { - `edition` (String) Edition of the MSSQL server instance - `encryption` (Attributes) this defines which key to use for storage encryption (see [below for nested schema](#nestedatt--encryption)) - `flavor_id` (String) The id of the instance flavor. +- `id` (String) Terraform's internal resource ID. It is structured as \"`project_id`,`region`,`instance_id`\". - `is_deletable` (Boolean) Whether the instance can be deleted or not. - `name` (String) The name of the instance. - `network` (Attributes) The access configuration of the instance (see [below for nested schema](#nestedatt--network)) diff --git a/docs/resources/postgresflexalpha_database.md b/docs/resources/postgresflexalpha_database.md index 8fdceeb5..7834287e 100644 --- a/docs/resources/postgresflexalpha_database.md +++ b/docs/resources/postgresflexalpha_database.md @@ -3,12 +3,12 @@ page_title: "stackitprivatepreview_postgresflexalpha_database Resource - stackitprivatepreview" subcategory: "" description: |- - Postgres Flex database resource schema. Must have a region specified in the provider configuration. + --- # stackitprivatepreview_postgresflexalpha_database (Resource) -Postgres Flex database resource schema. Must have a `region` specified in the provider configuration. + ## Example Usage @@ -32,16 +32,16 @@ import { ### Required -- `instance_id` (String) ID of the Postgres Flex instance. -- `name` (String) Database name. -- `owner` (String) Username of the database owner. -- `project_id` (String) STACKIT project ID to which the instance is associated. +- `name` (String) The name of the database. ### Optional -- `region` (String) The resource region. If not defined, the provider region is used. +- `database_id` (Number) The ID of the database. +- `instance_id` (String) The ID of the instance. +- `owner` (String) The owner of the database. +- `project_id` (String) The STACKIT project ID. +- `region` (String) The region which should be addressed ### Read-Only -- `database_id` (Number) Database ID. -- `id` (String) Terraform's internal resource ID. It is structured as "`project_id`,`region`,`instance_id`,`database_id`". +- `id` (Number) The id of the database. diff --git a/docs/resources/postgresflexalpha_user.md b/docs/resources/postgresflexalpha_user.md index d3b12f9d..2ebffe71 100644 --- a/docs/resources/postgresflexalpha_user.md +++ b/docs/resources/postgresflexalpha_user.md @@ -3,12 +3,12 @@ page_title: "stackitprivatepreview_postgresflexalpha_user Resource - stackitprivatepreview" subcategory: "" description: |- - Postgres Flex user resource schema. Must have a region specified in the provider configuration. + --- # stackitprivatepreview_postgresflexalpha_user (Resource) -Postgres Flex user resource schema. Must have a `region` specified in the provider configuration. + ## Example Usage @@ -32,21 +32,19 @@ import { ### Required -- `instance_id` (String) ID of the PostgresFlex instance. -- `project_id` (String) STACKIT project ID to which the instance is associated. -- `roles` (Set of String) Database access levels for the user. Possible values are: `login`, `createdb`, `createrole`. -- `username` (String) The name of the user. +- `name` (String) The name of the user. ### Optional -- `region` (String) The resource region. If not defined, the provider region is used. +- `instance_id` (String) The ID of the instance. +- `project_id` (String) The STACKIT project ID. +- `region` (String) The region which should be addressed +- `roles` (List of String) A list containing the user roles for the instance. +- `user_id` (Number) The ID of the user. ### Read-Only - `connection_string` (String) The connection string for the user to the instance. -- `host` (String) The host of the Postgres Flex instance. -- `id` (String) Terraform's internal resource ID. It is structured as "`project_id`,`region`,`instance_id`,`user_id`". -- `password` (String, Sensitive) The password for the user. This is only set upon creation. -- `port` (Number) The port of the Postgres Flex instance. +- `id` (Number) The ID of the user. +- `password` (String) The password for the user. - `status` (String) The current status of the user. -- `user_id` (Number) User ID. diff --git a/docs/resources/sqlserverflexalpha_user.md b/docs/resources/sqlserverflexalpha_user.md index 3f37556c..8c5f8702 100644 --- a/docs/resources/sqlserverflexalpha_user.md +++ b/docs/resources/sqlserverflexalpha_user.md @@ -3,12 +3,12 @@ page_title: "stackitprivatepreview_sqlserverflexalpha_user Resource - stackitprivatepreview" subcategory: "" description: |- - SQLServer Flex user resource schema. Must have a region specified in the provider configuration. + --- # stackitprivatepreview_sqlserverflexalpha_user (Resource) -SQLServer Flex user resource schema. Must have a `region` specified in the provider configuration. + ## Example Usage @@ -32,21 +32,22 @@ import { ### Required -- `instance_id` (String) ID of the SQLServer Flex instance. -- `project_id` (String) STACKIT project ID to which the instance is associated. -- `roles` (Set of String) Database access levels for the user. The values for the default roles are: `##STACKIT_DatabaseManager##`, `##STACKIT_LoginManager##`, `##STACKIT_ProcessManager##`, `##STACKIT_ServerManager##`, `##STACKIT_SQLAgentManager##`, `##STACKIT_SQLAgentUser##` -- `username` (String) Username of the SQLServer Flex instance. +- `roles` (List of String) A list containing the user roles for the instance. +- `username` (String) The name of the user. ### Optional -- `region` (String) +- `default_database` (String) The default database for a user of the instance. +- `instance_id` (String) The ID of the instance. +- `project_id` (String) The STACKIT project ID. +- `region` (String) The region which should be addressed +- `user_id` (Number) The ID of the user. ### Read-Only -- `default_database` (String) -- `host` (String) -- `id` (String) Terraform's internal resource ID. It is structured as "`project_id`,`region`,`instance_id`,`user_id`". -- `password` (String, Sensitive) Password of the user account. -- `port` (Number) -- `status` (String) -- `user_id` (Number) User ID. +- `host` (String) The host of the instance in which the user belongs to. +- `id` (Number) The ID of the user. +- `password` (String) The password for the user. +- `port` (Number) The port of the instance in which the user belongs to. +- `status` (String) The current status of the user. +- `uri` (String) The connection string for the user to the instance. diff --git a/stackit/internal/services/postgresflexalpha/database/functions.go b/stackit/internal/services/postgresflexalpha/database/functions.go index 4496faa1..e67f4926 100644 --- a/stackit/internal/services/postgresflexalpha/database/functions.go +++ b/stackit/internal/services/postgresflexalpha/database/functions.go @@ -59,7 +59,7 @@ func getDatabase( for page := int32(1); ; page++ { res, err := client.ListDatabasesRequest(ctx, projectId, region, instanceId). - Page(page).Size(pageSize).Sort(postgresflex.DATABASESORT_INDEX_ASC).Execute() + Page(page).Size(pageSize).Sort(postgresflex.DATABASESORT_DATABASE_ID_ASC).Execute() if err != nil { return nil, fmt.Errorf("requesting database list (page %d): %w", page, err) } diff --git a/stackit/internal/services/postgresflexalpha/flavor/functions.go b/stackit/internal/services/postgresflexalpha/flavor/functions.go index 5a631bc7..67c7f9fa 100644 --- a/stackit/internal/services/postgresflexalpha/flavor/functions.go +++ b/stackit/internal/services/postgresflexalpha/flavor/functions.go @@ -44,7 +44,7 @@ func getFlavorsByFilter( for page := int32(1); ; page++ { res, err := client.GetFlavorsRequest(ctx, projectId, region). - Page(page).Size(pageSize).Sort(postgresflex.FLAVORSORT_INDEX_ASC).Execute() + Page(page).Size(pageSize).Sort(postgresflex.FLAVORSORT_ID_ASC).Execute() if err != nil { return nil, fmt.Errorf("requesting flavors list (page %d): %w", page, err) } From b63526b065b4c2a3082443b16df318ea91584b83 Mon Sep 17 00:00:00 2001 From: "Marcel S. Henselin" Date: Tue, 10 Feb 2026 09:56:54 +0000 Subject: [PATCH 08/41] chore: regenerate gen files (#40) ## Description relates to #1234 ## Checklist - [ ] Issue was linked above - [ ] Code format was applied: `make fmt` - [ ] Examples were added / adjusted (see `examples/` directory) - [x] Docs are up-to-date: `make generate-docs` (will be checked by CI) - [ ] Unit tests got implemented or updated - [ ] Acceptance tests got implemented or updated (see e.g. [here](https://github.com/stackitcloud/terraform-provider-stackit/blob/f5f99d170996b208672ae684b6da53420e369563/stackit/internal/services/dns/dns_acc_test.go)) - [x] Unit tests are passing: `make test` (will be checked by CI) - [x] No linter issues: `make lint` (will be checked by CI) Reviewed-on: https://tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pulls/40 Co-authored-by: Marcel S. Henselin Co-committed-by: Marcel S. Henselin --- .../database/datasources_gen/databases_data_source_gen.go | 2 -- .../flavors/datasources_gen/flavors_data_source_gen.go | 2 -- .../instance/datasources_gen/instances_data_source_gen.go | 2 -- .../user/datasources_gen/users_data_source_gen.go | 2 -- 4 files changed, 8 deletions(-) diff --git a/stackit/internal/services/postgresflexalpha/database/datasources_gen/databases_data_source_gen.go b/stackit/internal/services/postgresflexalpha/database/datasources_gen/databases_data_source_gen.go index 7e3e1eec..a533e5ca 100644 --- a/stackit/internal/services/postgresflexalpha/database/datasources_gen/databases_data_source_gen.go +++ b/stackit/internal/services/postgresflexalpha/database/datasources_gen/databases_data_source_gen.go @@ -126,8 +126,6 @@ func DatabasesDataSourceSchema(ctx context.Context) schema.Schema { "database_name.asc", "database_owner.desc", "database_owner.asc", - "index.asc", - "index.desc", ), }, }, diff --git a/stackit/internal/services/postgresflexalpha/flavors/datasources_gen/flavors_data_source_gen.go b/stackit/internal/services/postgresflexalpha/flavors/datasources_gen/flavors_data_source_gen.go index dbfe5cc9..e0b76221 100644 --- a/stackit/internal/services/postgresflexalpha/flavors/datasources_gen/flavors_data_source_gen.go +++ b/stackit/internal/services/postgresflexalpha/flavors/datasources_gen/flavors_data_source_gen.go @@ -151,8 +151,6 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema { MarkdownDescription: "Sorting of the flavors to be returned on each page.", Validators: []validator.String{ stringvalidator.OneOf( - "index.desc", - "index.asc", "cpu.desc", "cpu.asc", "flavor_description.asc", diff --git a/stackit/internal/services/postgresflexalpha/instance/datasources_gen/instances_data_source_gen.go b/stackit/internal/services/postgresflexalpha/instance/datasources_gen/instances_data_source_gen.go index beb620dd..0407c13f 100644 --- a/stackit/internal/services/postgresflexalpha/instance/datasources_gen/instances_data_source_gen.go +++ b/stackit/internal/services/postgresflexalpha/instance/datasources_gen/instances_data_source_gen.go @@ -113,8 +113,6 @@ func InstancesDataSourceSchema(ctx context.Context) schema.Schema { MarkdownDescription: "Sorting of the items to be returned on each page.", Validators: []validator.String{ stringvalidator.OneOf( - "index.desc", - "index.asc", "id.desc", "id.asc", "is_deletable.desc", diff --git a/stackit/internal/services/postgresflexalpha/user/datasources_gen/users_data_source_gen.go b/stackit/internal/services/postgresflexalpha/user/datasources_gen/users_data_source_gen.go index b54a5dd6..bc83be6b 100644 --- a/stackit/internal/services/postgresflexalpha/user/datasources_gen/users_data_source_gen.go +++ b/stackit/internal/services/postgresflexalpha/user/datasources_gen/users_data_source_gen.go @@ -86,8 +86,6 @@ func UsersDataSourceSchema(ctx context.Context) schema.Schema { stringvalidator.OneOf( "id.asc", "id.desc", - "index.desc", - "index.asc", "name.desc", "name.asc", "status.desc", From 00a43dfb4c29b749f129aa197ec8777a0c6b1c22 Mon Sep 17 00:00:00 2001 From: "Marcel S. Henselin" Date: Tue, 10 Feb 2026 12:25:19 +0000 Subject: [PATCH 09/41] fix: fix wrong identity handling in Read (#41) ## Description relates to #1234 ## Checklist - [ ] Issue was linked above - [ ] Code format was applied: `make fmt` - [ ] Examples were added / adjusted (see `examples/` directory) - [x] Docs are up-to-date: `make generate-docs` (will be checked by CI) - [ ] Unit tests got implemented or updated - [ ] Acceptance tests got implemented or updated (see e.g. [here](https://github.com/stackitcloud/terraform-provider-stackit/blob/f5f99d170996b208672ae684b6da53420e369563/stackit/internal/services/dns/dns_acc_test.go)) - [x] Unit tests are passing: `make test` (will be checked by CI) - [x] No linter issues: `make lint` (will be checked by CI) Reviewed-on: https://tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pulls/41 Co-authored-by: Marcel S. Henselin Co-committed-by: Marcel S. Henselin --- .../postgresflexalpha/instance/resource.go | 25 ------------------- 1 file changed, 25 deletions(-) diff --git a/stackit/internal/services/postgresflexalpha/instance/resource.go b/stackit/internal/services/postgresflexalpha/instance/resource.go index 78bb0572..3d603df0 100644 --- a/stackit/internal/services/postgresflexalpha/instance/resource.go +++ b/stackit/internal/services/postgresflexalpha/instance/resource.go @@ -324,13 +324,6 @@ func (r *instanceResource) Read( return } - // Read identity data - var identityData InstanceResourceIdentityModel - resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...) - if resp.Diagnostics.HasError() { - return - } - ctx = core.InitProviderContext(ctx) // projectId := model.ProjectId.ValueString() @@ -340,34 +333,16 @@ func (r *instanceResource) Read( var projectId string if !model.ProjectId.IsNull() && !model.ProjectId.IsUnknown() { projectId = model.ProjectId.ValueString() - } else { - if identityData.ProjectID.IsNull() || identityData.ProjectID.IsUnknown() { - core.LogAndAddError(ctx, &resp.Diagnostics, functionErrorSummary, "project_id not found in config") - return - } - projectId = identityData.ProjectID.ValueString() } var region string if !model.Region.IsNull() && !model.Region.IsUnknown() { region = r.providerData.GetRegionWithOverride(model.Region) - } else { - if identityData.Region.IsNull() || identityData.Region.IsUnknown() { - core.LogAndAddError(ctx, &resp.Diagnostics, functionErrorSummary, "region not found in config") - return - } - region = r.providerData.GetRegionWithOverride(identityData.Region) } var instanceId string if !model.InstanceId.IsNull() && !model.InstanceId.IsUnknown() { instanceId = model.InstanceId.ValueString() - } else { - if identityData.InstanceID.IsNull() || identityData.InstanceID.IsUnknown() { - core.LogAndAddError(ctx, &resp.Diagnostics, functionErrorSummary, "instance_id not found in config") - return - } - instanceId = identityData.InstanceID.ValueString() } ctx = tflog.SetField(ctx, "project_id", projectId) From 9dbf36dd352ce292365b78824a35a13f9a3673db Mon Sep 17 00:00:00 2001 From: "marcel.henselin" Date: Tue, 10 Feb 2026 13:31:42 +0000 Subject: [PATCH 10/41] chore: activate darwin and windows builds (#43) ## Description relates to #1234 ## Checklist - [ ] Issue was linked above - [ ] Code format was applied: `make fmt` - [ ] Examples were added / adjusted (see `examples/` directory) - [x] Docs are up-to-date: `make generate-docs` (will be checked by CI) - [ ] Unit tests got implemented or updated - [ ] Acceptance tests got implemented or updated (see e.g. [here](https://github.com/stackitcloud/terraform-provider-stackit/blob/f5f99d170996b208672ae684b6da53420e369563/stackit/internal/services/dns/dns_acc_test.go)) - [x] Unit tests are passing: `make test` (will be checked by CI) - [x] No linter issues: `make lint` (will be checked by CI) Reviewed-on: https://tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pulls/43 Co-authored-by: marcel.henselin Co-committed-by: marcel.henselin --- .goreleaser.yaml | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/.goreleaser.yaml b/.goreleaser.yaml index 3e9105ca..e0aafe37 100644 --- a/.goreleaser.yaml +++ b/.goreleaser.yaml @@ -19,20 +19,20 @@ builds: ldflags: - '-s -w -X main.version={{.Version}} -X main.commit={{.Commit}}' goos: -# - freebsd -# - windows + - freebsd + - windows - linux - darwin goarch: - amd64 -# - '386' -# - arm + - '386' + - arm - arm64 -# ignore: -# - goos: darwin -# goarch: '386' -# - goos: windows -# goarch: arm + ignore: + - goos: darwin + goarch: '386' + - goos: windows + goarch: arm binary: '{{ .ProjectName }}_v{{ .Version }}' archives: - formats: [ 'zip' ] From b737875c68885e2f2f2e177e3af74fbd4c2d5162 Mon Sep 17 00:00:00 2001 From: Marcel_Henselin Date: Tue, 10 Feb 2026 13:32:15 +0000 Subject: [PATCH 11/41] fix: fix README.md (#42) ## Description relates to #1234 ## Checklist - [ ] Issue was linked above - [ ] Code format was applied: `make fmt` - [ ] Examples were added / adjusted (see `examples/` directory) - [x] Docs are up-to-date: `make generate-docs` (will be checked by CI) - [ ] Unit tests got implemented or updated - [ ] Acceptance tests got implemented or updated (see e.g. [here](https://github.com/stackitcloud/terraform-provider-stackit/blob/f5f99d170996b208672ae684b6da53420e369563/stackit/internal/services/dns/dns_acc_test.go)) - [x] Unit tests are passing: `make test` (will be checked by CI) - [x] No linter issues: `make lint` (will be checked by CI) Reviewed-on: https://tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pulls/42 Reviewed-by: Andre_Harms --- README.md | 19 ++++++++++--------- 1 file changed, 10 insertions(+), 9 deletions(-) diff --git a/README.md b/README.md index 018f615e..ab79f28e 100644 --- a/README.md +++ b/README.md @@ -1,15 +1,14 @@
-
STACKIT logo -
-
-# STACKIT Terraform Provider (PRIVATE PREVIEW) +# STACKIT Terraform Provider
(PRIVATE PREVIEW) [![GitHub Release](https://img.shields.io/github/v/release/stackitcloud/terraform-provider-stackit)](https://registry.terraform.io/providers/stackitcloud/stackit/latest) ![GitHub go.mod Go version](https://img.shields.io/github/go-mod/go-version/stackitcloud/terraform-provider-stackit) [![GitHub License](https://img.shields.io/github/license/stackitcloud/terraform-provider-stackit)](https://www.apache.org/licenses/LICENSE-2.0) -This project is the official [Terraform Provider](https://registry.terraform.io/providers/stackitcloud/stackit/latest/docs) for [STACKIT](https://www.stackit.de/en/), which allows you to manage STACKIT resources through Terraform. +This project is the **NOT** official [Terraform Provider](https://registry.terraform.io/providers/stackitcloud/stackit/latest/docs) for [STACKIT](https://www.stackit.de/en/)! + +This a **private preview only**, which allows you to manage STACKIT resources through Terraform. ## Getting Started @@ -18,20 +17,22 @@ To install the [STACKIT Terraform Provider](https://registry.terraform.io/provid ```hcl terraform { required_providers { - stackit = { - source = "stackitcloud/stackit" - version = "X.X.X" + stackitprivatepreview = { + source = "tfregistry.sysops.stackit.rocks/mhenselin/stackitprivatepreview" + version = "= 0.0.5-alpha" } } } -provider "stackit" { +provider "stackitprivatepreview" { # Configuration options } ``` Check one of the examples in the [examples](examples/) folder. +TODO: revise the following sections + ## Authentication To authenticate, you will need a [service account](https://docs.stackit.cloud/platform/access-and-identity/service-accounts/). Create it in the [STACKIT Portal](https://portal.stackit.cloud/) and assign the necessary permissions to it, e.g. `project.owner`. There are multiple ways to authenticate: From 4991897eca105fe618dc04ebd231df5c3d6f6152 Mon Sep 17 00:00:00 2001 From: Andre Harms Date: Tue, 10 Feb 2026 16:06:10 +0000 Subject: [PATCH 12/41] fix: fix mapping tests (#44) ## Description relates to #1234 ## Checklist - [ ] Issue was linked above - [ ] Code format was applied: `make fmt` - [ ] Examples were added / adjusted (see `examples/` directory) - [x] Docs are up-to-date: `make generate-docs` (will be checked by CI) - [ ] Unit tests got implemented or updated - [ ] Acceptance tests got implemented or updated (see e.g. [here](https://github.com/stackitcloud/terraform-provider-stackit/blob/f5f99d170996b208672ae684b6da53420e369563/stackit/internal/services/dns/dns_acc_test.go)) - [x] Unit tests are passing: `make test` (will be checked by CI) - [x] No linter issues: `make lint` (will be checked by CI) Reviewed-on: https://tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pulls/44 Reviewed-by: Marcel_Henselin Co-authored-by: Andre Harms Co-committed-by: Andre Harms --- .../postgresflexalpha/database/mapper_test.go | 7 +++- .../postgresflexalpha/instance/functions.go | 38 ++++++++++++++++++- .../services/postgresflexalpha/main.go | 1 - .../postgresflexalpha/user/mapper_test.go | 5 ++- .../services/sqlserverflexalpha/main.go | 1 - .../sqlserverflexalpha/user/resource.go | 2 + .../internal/wait/postgresflexalpha/wait.go | 14 ++++--- 7 files changed, 56 insertions(+), 12 deletions(-) delete mode 100644 stackit/internal/services/postgresflexalpha/main.go delete mode 100644 stackit/internal/services/sqlserverflexalpha/main.go diff --git a/stackit/internal/services/postgresflexalpha/database/mapper_test.go b/stackit/internal/services/postgresflexalpha/database/mapper_test.go index a2f18c12..97212237 100644 --- a/stackit/internal/services/postgresflexalpha/database/mapper_test.go +++ b/stackit/internal/services/postgresflexalpha/database/mapper_test.go @@ -34,7 +34,12 @@ func TestMapFields(t *testing.T) { Name: utils.Ptr("my-db"), Owner: utils.Ptr("\"my-owner\""), }, - model: &dataSourceModel{}, + model: &dataSourceModel{ + DatabaseModel: datasource.DatabaseModel{ + ProjectId: types.StringValue("my-project"), + InstanceId: types.StringValue("my-instance"), + }, + }, region: "eu01", }, expected: expected{ diff --git a/stackit/internal/services/postgresflexalpha/instance/functions.go b/stackit/internal/services/postgresflexalpha/instance/functions.go index 862f88ff..93fb544d 100644 --- a/stackit/internal/services/postgresflexalpha/instance/functions.go +++ b/stackit/internal/services/postgresflexalpha/instance/functions.go @@ -55,6 +55,22 @@ func mapGetInstanceResponseToModel( ) } + isConnectionInfoIncomplete := resp.ConnectionInfo == nil || + resp.ConnectionInfo.Host == nil || *resp.ConnectionInfo.Host == "" || + resp.ConnectionInfo.Port == nil || *resp.ConnectionInfo.Port == 0 + + if isConnectionInfoIncomplete { + m.ConnectionInfo = postgresflexalpharesource.NewConnectionInfoValueNull() + } else { + m.ConnectionInfo = postgresflexalpharesource.NewConnectionInfoValueMust( + postgresflexalpharesource.ConnectionInfoValue{}.AttributeTypes(ctx), + map[string]attr.Value{ + "host": types.StringPointerValue(resp.ConnectionInfo.Host), + "port": types.Int64PointerValue(resp.ConnectionInfo.Port), + }, + ) + } + m.ConnectionInfo.Host = types.StringValue("") if host, ok := resp.ConnectionInfo.GetHostOk(); ok { m.ConnectionInfo.Host = types.StringValue(host) @@ -138,8 +154,8 @@ func mapGetDataInstanceResponseToModel( ) error { m.BackupSchedule = types.StringValue(resp.GetBackupSchedule()) handleEncryption(m, resp) - m.ConnectionInfo.Host = types.StringValue(resp.ConnectionInfo.GetHost()) - m.ConnectionInfo.Port = types.Int64Value(resp.ConnectionInfo.GetPort()) + handleConnectionInfo(ctx, m, resp) + m.FlavorId = types.StringValue(resp.GetFlavorId()) m.Id = utils.BuildInternalTerraformId(m.ProjectId.ValueString(), m.Region.ValueString(), m.InstanceId.ValueString()) m.InstanceId = types.StringPointerValue(resp.Id) @@ -169,6 +185,24 @@ func mapGetDataInstanceResponseToModel( return nil } +func handleConnectionInfo(ctx context.Context, m *dataSourceModel, resp *postgresflex.GetInstanceResponse) { + isConnectionInfoIncomplete := resp.ConnectionInfo == nil || + resp.ConnectionInfo.Host == nil || *resp.ConnectionInfo.Host == "" || + resp.ConnectionInfo.Port == nil || *resp.ConnectionInfo.Port == 0 + + if isConnectionInfoIncomplete { + m.ConnectionInfo = postgresflexalphadatasource.NewConnectionInfoValueNull() + } else { + m.ConnectionInfo = postgresflexalphadatasource.NewConnectionInfoValueMust( + postgresflexalphadatasource.ConnectionInfoValue{}.AttributeTypes(ctx), + map[string]attr.Value{ + "host": types.StringPointerValue(resp.ConnectionInfo.Host), + "port": types.Int64PointerValue(resp.ConnectionInfo.Port), + }, + ) + } +} + func handleNetwork(ctx context.Context, m *dataSourceModel, resp *postgresflex.GetInstanceResponse) error { netAcl, diags := types.ListValueFrom(ctx, types.StringType, resp.Network.GetAcl()) if diags.HasError() { diff --git a/stackit/internal/services/postgresflexalpha/main.go b/stackit/internal/services/postgresflexalpha/main.go deleted file mode 100644 index 5e20f208..00000000 --- a/stackit/internal/services/postgresflexalpha/main.go +++ /dev/null @@ -1 +0,0 @@ -package postgresflexalpha diff --git a/stackit/internal/services/postgresflexalpha/user/mapper_test.go b/stackit/internal/services/postgresflexalpha/user/mapper_test.go index 6eeff9f0..37410b19 100644 --- a/stackit/internal/services/postgresflexalpha/user/mapper_test.go +++ b/stackit/internal/services/postgresflexalpha/user/mapper_test.go @@ -164,6 +164,7 @@ func TestMapFieldsCreate(t *testing.T) { }, testRegion, resourceModel{ + Id: types.Int64Value(1), UserId: types.Int64Value(1), InstanceId: types.StringValue("iid"), ProjectId: types.StringValue("pid"), @@ -185,6 +186,7 @@ func TestMapFieldsCreate(t *testing.T) { }, testRegion, resourceModel{ + Id: types.Int64Value(1), UserId: types.Int64Value(1), InstanceId: types.StringValue("iid"), ProjectId: types.StringValue("pid"), @@ -193,7 +195,7 @@ func TestMapFieldsCreate(t *testing.T) { Password: types.StringNull(), Region: types.StringValue(testRegion), Status: types.StringValue("status"), - ConnectionString: types.StringValue("connection_string"), + ConnectionString: types.StringNull(), }, true, }, @@ -206,6 +208,7 @@ func TestMapFieldsCreate(t *testing.T) { }, testRegion, resourceModel{ + Id: types.Int64Value(1), UserId: types.Int64Value(1), InstanceId: types.StringValue("iid"), ProjectId: types.StringValue("pid"), diff --git a/stackit/internal/services/sqlserverflexalpha/main.go b/stackit/internal/services/sqlserverflexalpha/main.go deleted file mode 100644 index 7ec38cdc..00000000 --- a/stackit/internal/services/sqlserverflexalpha/main.go +++ /dev/null @@ -1 +0,0 @@ -package sqlserverflexalpha diff --git a/stackit/internal/services/sqlserverflexalpha/user/resource.go b/stackit/internal/services/sqlserverflexalpha/user/resource.go index 41e6e1c5..a24cad8b 100644 --- a/stackit/internal/services/sqlserverflexalpha/user/resource.go +++ b/stackit/internal/services/sqlserverflexalpha/user/resource.go @@ -416,6 +416,7 @@ func mapFieldsCreate(userResp *sqlserverflexalpha.CreateUserResponse, model *res return fmt.Errorf("user id not present") } userId := *user.Id + model.Id = types.Int64Value(userId) model.UserId = types.Int64Value(userId) model.Username = types.StringPointerValue(user.Username) @@ -467,6 +468,7 @@ func mapFields(userResp *sqlserverflexalpha.GetUserResponse, model *resourceMode return fmt.Errorf("user id not present") } + model.Id = types.Int64Value(userId) model.UserId = types.Int64Value(userId) model.Username = types.StringPointerValue(user.Username) diff --git a/stackit/internal/wait/postgresflexalpha/wait.go b/stackit/internal/wait/postgresflexalpha/wait.go index 5177e6f1..c490b605 100644 --- a/stackit/internal/wait/postgresflexalpha/wait.go +++ b/stackit/internal/wait/postgresflexalpha/wait.go @@ -67,9 +67,11 @@ func CreateInstanceWaitHandler( if s == nil || s.Id == nil || *s.Id != instanceId || s.Status == nil { return false, nil, nil } - tflog.Debug(ctx, "waiting for instance ready", map[string]interface{}{ - "status": *s.Status, - }) + tflog.Debug( + ctx, "waiting for instance ready", map[string]interface{}{ + "status": *s.Status, + }, + ) switch *s.Status { default: return true, s, fmt.Errorf("instance with id %s has unexpected status %s", instanceId, *s.Status) @@ -118,13 +120,13 @@ func CreateInstanceWaitHandler( instanceCreated = true instanceGetResponse = s case InstanceStateSuccess: - if *s.Network.AccessScope == "SNA" { - if s.Network == nil || s.Network.InstanceAddress == nil { + if s.Network != nil && s.Network.AccessScope != nil && *s.Network.AccessScope == "SNA" { + if s.Network.InstanceAddress == nil { tflog.Warn(ctx, "Waiting for instance_address") return false, nil, nil } if s.Network.RouterAddress == nil { - tflog.Info(ctx, "Waiting for router_address") + tflog.Warn(ctx, "Waiting for router_address") return false, nil, nil } } From e21fe6432631c233b31864bce41ad6379b21d30e Mon Sep 17 00:00:00 2001 From: "Marcel S. Henselin" Date: Tue, 10 Feb 2026 16:46:21 +0000 Subject: [PATCH 13/41] feat: add_testing (#45) ## Description relates to #1234 ## Checklist - [ ] Issue was linked above - [ ] Code format was applied: `make fmt` - [ ] Examples were added / adjusted (see `examples/` directory) - [x] Docs are up-to-date: `make generate-docs` (will be checked by CI) - [ ] Unit tests got implemented or updated - [ ] Acceptance tests got implemented or updated (see e.g. [here](https://github.com/stackitcloud/terraform-provider-stackit/blob/f5f99d170996b208672ae684b6da53420e369563/stackit/internal/services/dns/dns_acc_test.go)) - [x] Unit tests are passing: `make test` (will be checked by CI) - [x] No linter issues: `make lint` (will be checked by CI) Reviewed-on: https://tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pulls/45 Reviewed-by: Andre_Harms Co-authored-by: Marcel S. Henselin Co-committed-by: Marcel S. Henselin --- .github/actions/setup-cache-go/action.yaml | 61 + .github/workflows/ci.yaml | 8 + Makefile | 7 +- cmd/cmd/build/build.go | 2 - .../internal => internal}/testutil/assert.go | 0 .../testutil/testutil.go.bak | 2 - .../testutil/testutil_test.go.bak | 0 internal/testutils/functions.go | 125 ++ internal/testutils/helpers.go | 465 ++++++ internal/testutils/testutils.go | 219 +++ internal/testutils/testutils_test.go | 48 + .../instance/resource_test.go | 280 ---- .../postgresflexalpha/instance/schema_test.go | 33 - .../postgresflex_acc_test.go | 1402 ++++++++++------- .../testdata/instance_template.gompl | 30 + .../{resource-complete.tf => resource-enc.tf} | 15 +- .../testdata/resource-no-enc.tf | 19 + .../services/postgresflexalpha/user/mapper.go | 2 +- .../postgresflexalpha/user/resource.go | 40 - .../sqlserverflex_acc_test.go | 16 +- .../sqlserverflexalpha/user/resource_test.go | 327 ++-- .../sqlserverflexbeta/instance/functions.go | 25 +- .../instance/resource_test.go | 437 ----- .../sqlserverflexbeta/user/functions.go | 85 +- .../sqlserverflexbeta/user/resource.go | 12 + stackit/provider.go | 3 +- stackit/provider_acc_test.go | 188 +-- stackit/testdata/provider-all-attributes.tf | 14 +- stackit/testdata/provider-credentials.tf | 19 +- .../testdata/provider-invalid-attribute.tf | 16 +- 30 files changed, 2097 insertions(+), 1803 deletions(-) create mode 100644 .github/actions/setup-cache-go/action.yaml rename {stackit/internal => internal}/testutil/assert.go (100%) rename stackit/internal/testutil/testutil.go => internal/testutil/testutil.go.bak (99%) rename stackit/internal/testutil/testutil_test.go => internal/testutil/testutil_test.go.bak (100%) create mode 100644 internal/testutils/functions.go create mode 100644 internal/testutils/helpers.go create mode 100644 internal/testutils/testutils.go create mode 100644 internal/testutils/testutils_test.go delete mode 100644 stackit/internal/services/postgresflexalpha/instance/resource_test.go delete mode 100644 stackit/internal/services/postgresflexalpha/instance/schema_test.go create mode 100644 stackit/internal/services/postgresflexalpha/testdata/instance_template.gompl rename stackit/internal/services/postgresflexalpha/testdata/{resource-complete.tf => resource-enc.tf} (57%) create mode 100644 stackit/internal/services/postgresflexalpha/testdata/resource-no-enc.tf delete mode 100644 stackit/internal/services/sqlserverflexbeta/instance/resource_test.go diff --git a/.github/actions/setup-cache-go/action.yaml b/.github/actions/setup-cache-go/action.yaml new file mode 100644 index 00000000..8837ca59 --- /dev/null +++ b/.github/actions/setup-cache-go/action.yaml @@ -0,0 +1,61 @@ +# SPDX-License-Identifier: MIT +name: 'Forgejo Actions to setup Go and cache dependencies' +author: 'Forgejo authors' +description: | + Wrap the setup-go with improved dependency caching. +inputs: + username: + description: 'User for which to manage the dependency cache' + default: root + +runs: + using: "composite" + steps: + - name: "Install zstd for faster caching" + run: | + apt-get update -qq + apt-get -q install -qq -y zstd + + - name: "Set up Go using setup-go" + uses: https://data.forgejo.org/actions/setup-go@v6 + id: go-version + with: + go-version-file: "go.mod" + # do not cache dependencies, we do this manually + cache: false + + - name: "Get go environment information" + id: go-environment + run: | + chmod 755 $HOME # ensure ${RUN_AS_USER} has permission when go is located in $HOME + export GOROOT="$(go env GOROOT)" + echo "modcache=$(su ${RUN_AS_USER} -c '${GOROOT}/bin/go env GOMODCACHE')" >> "$GITHUB_OUTPUT" + echo "cache=$(su ${RUN_AS_USER} -c '${GOROOT}/bin/go env GOCACHE')" >> "$GITHUB_OUTPUT" + env: + RUN_AS_USER: ${{ inputs.username }} + GO_VERSION: ${{ steps.go-version.outputs.go-version }} + + - name: "Create cache folders with correct permissions (for non-root users)" + if: inputs.username != 'root' + # when the cache is restored, only the permissions of the last part are restored + # so assuming that /home/user exists and we are restoring /home/user/go/pkg/mod, + # both folders will have the correct permissions, but + # /home/user/go and /home/user/go/pkg might be owned by root + run: | + su ${RUN_AS_USER} -c 'mkdir -p "${MODCACHE_DIR}" "${CACHE_DIR}"' + env: + RUN_AS_USER: ${{ inputs.username }} + MODCACHE_DIR: ${{ steps.go-environment.outputs.modcache }} + CACHE_DIR: ${{ steps.go-environment.outputs.cache }} + + - name: "Restore Go dependencies from cache or mark for later caching" + id: cache-deps + uses: https://code.forgejo.org/actions/cache@v5 + with: + key: setup-cache-go-deps-${{ runner.os }}-${{ inputs.username }}-${{ steps.go-version.outputs.go_version }}-${{ hashFiles('go.sum', 'go.mod') }} + restore-keys: | + setup-cache-go-deps-${{ runner.os }}-${{ inputs.username }}-${{ steps.go-version.outputs.go_version }}- + setup-cache-go-deps-${{ runner.os }}-${{ inputs.username }}- + path: | + ${{ steps.go-environment.outputs.modcache }} + ${{ steps.go-environment.outputs.cache }} diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 186d2b42..ac9771a6 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -6,6 +6,11 @@ on: - alpha - main workflow_dispatch: + schedule: + # every sunday at 00:00 + # - cron: '0 0 * * 0' + # every day at 00:00 + - cron: '0 0 * * *' push: branches: - '!main' @@ -100,7 +105,9 @@ jobs: --version=${VERSION} + main: + if: ${{ github.event_name != 'schedule' }} name: CI runs-on: ubuntu-latest needs: config @@ -146,6 +153,7 @@ jobs: path: "stackit/${{ env.CODE_COVERAGE_FILE_NAME }}" config: + if: ${{ github.event_name != 'schedule' }} name: Check GoReleaser config runs-on: ubuntu-latest steps: diff --git a/Makefile b/Makefile index c6b3f9ac..f678f099 100644 --- a/Makefile +++ b/Makefile @@ -34,15 +34,16 @@ fmt: @terraform fmt -diff -recursive # TEST +.PHONY: test coverage test: @echo "Running tests for the terraform provider" - @cd $(ROOT_DIR)/stackit && go test ./... -count=1 -coverprofile=coverage.out && cd $(ROOT_DIR) + @cd $(ROOT_DIR)/stackit && go test ./... -count=1 -coverprofile=../coverage.out && cd $(ROOT_DIR) # Test coverage coverage: @echo ">> Creating test coverage report for the terraform provider" - @cd $(ROOT_DIR)/stackit && (go test ./... -count=1 -coverprofile=coverage.out || true) && cd $(ROOT_DIR) - @cd $(ROOT_DIR)/stackit && go tool cover -html=coverage.out -o coverage.html && cd $(ROOT_DIR) + @cd $(ROOT_DIR)/stackit && (go test ./... -count=1 -coverprofile=../coverage.out || true) && cd $(ROOT_DIR) + @cd $(ROOT_DIR)/stackit && go tool cover -html=../coverage.out -o ../coverage.html && cd $(ROOT_DIR) test-acceptance-tf: @if [ -z $(TF_ACC_PROJECT_ID) ]; then echo "Input TF_ACC_PROJECT_ID missing"; exit 1; fi diff --git a/cmd/cmd/build/build.go b/cmd/cmd/build/build.go index 1226b876..c80401fa 100644 --- a/cmd/cmd/build/build.go +++ b/cmd/cmd/build/build.go @@ -31,8 +31,6 @@ const ( GEN_REPO = "https://github.com/stackitcloud/stackit-sdk-generator.git" ) -var supportedVersions = []string{"alpha", "beta"} - type version struct { verString string major int diff --git a/stackit/internal/testutil/assert.go b/internal/testutil/assert.go similarity index 100% rename from stackit/internal/testutil/assert.go rename to internal/testutil/assert.go diff --git a/stackit/internal/testutil/testutil.go b/internal/testutil/testutil.go.bak similarity index 99% rename from stackit/internal/testutil/testutil.go rename to internal/testutil/testutil.go.bak index b678efef..2756677f 100644 --- a/stackit/internal/testutil/testutil.go +++ b/internal/testutil/testutil.go.bak @@ -1,5 +1,3 @@ -// Copyright (c) STACKIT - package testutil import ( diff --git a/stackit/internal/testutil/testutil_test.go b/internal/testutil/testutil_test.go.bak similarity index 100% rename from stackit/internal/testutil/testutil_test.go rename to internal/testutil/testutil_test.go.bak diff --git a/internal/testutils/functions.go b/internal/testutils/functions.go new file mode 100644 index 00000000..e672d57c --- /dev/null +++ b/internal/testutils/functions.go @@ -0,0 +1,125 @@ +package testutils + +import ( + "bytes" + "fmt" + "log" + "os" + "path" + "path/filepath" + "runtime" + "strings" + "testing" + "text/template" +) + +// GetHomeEnvVariableName Helper function to obtain the home directory on different systems. +// Based on os.UserHomeDir(). +func GetHomeEnvVariableName() string { + env := "HOME" + switch runtime.GOOS { + case "windows": + env = "USERPROFILE" + case "plan9": + env = "home" + } + return env +} + +// CreateTemporaryHome create temporary home and initialize the credentials file as well +func CreateTemporaryHome(createValidCredentialsFile bool, t *testing.T) string { + // create a temporary file + tempHome, err := os.MkdirTemp("", "tempHome") + if err != nil { + t.Fatalf("Failed to create temporary home directory: %v", err) + } + + // create credentials file in temp directory + stackitFolder := path.Join(tempHome, ".stackit") + if err := os.Mkdir(stackitFolder, 0o750); err != nil { + t.Fatalf("Failed to create stackit folder: %v", err) + } + + filePath := path.Join(stackitFolder, "credentials.json") + file, err := os.Create(filePath) + if err != nil { + t.Fatalf("Failed to create credentials file: %v", err) + } + defer func() { + if err := file.Close(); err != nil { + t.Fatalf("Error while closing the file: %v", err) + } + }() + + // Define content, default = invalid token + token := "foo_token" + if createValidCredentialsFile { + token = GetTestProjectServiceAccountJson("") + } + if _, err = file.WriteString(token); err != nil { + t.Fatalf("Error writing to file: %v", err) + } + + return tempHome +} + +// SetTemporaryHome Function to overwrite the home folder +func SetTemporaryHome(tempHomePath string) { + env := GetHomeEnvVariableName() + if err := os.Setenv(env, tempHomePath); err != nil { + fmt.Printf("Error setting temporary home directory %v", err) + } +} + +// CleanupTemporaryHome cleanup the temporary home and reset the environment variable +func CleanupTemporaryHome(tempHomePath string, t *testing.T) { + if err := os.RemoveAll(tempHomePath); err != nil { + t.Fatalf("Error cleaning up temporary folder: %v", err) + } + originalHomeDir, err := os.UserHomeDir() + if err != nil { + t.Fatalf("Failed to restore home directory back to normal: %v", err) + } + // revert back to original home folder + env := GetHomeEnvVariableName() + if err := os.Setenv(env, originalHomeDir); err != nil { + fmt.Printf("Error resetting temporary home directory %v", err) + } +} + +func ucFirst(s string) string { + if len(s) == 0 { + return "" + } + return strings.ToUpper(s[:1]) + s[1:] +} + +func StringFromTemplateMust(tplFile string, data any) string { + res, err := StringFromTemplate(tplFile, data) + if err != nil { + log.Fatalln(err) + } + return res +} + +func StringFromTemplate(tplFile string, data any) (string, error) { + fn := template.FuncMap{ + "ucfirst": ucFirst, + } + + file := filepath.Base(tplFile) + + tmpl, err := template.New(file).Funcs(fn).ParseFiles(tplFile) + if err != nil { + return "", err + } + + tplBuf := &bytes.Buffer{} + + err = tmpl.Execute(tplBuf, data) + if err != nil { + return "", err + } + + return tplBuf.String(), nil +} diff --git a/internal/testutils/helpers.go b/internal/testutils/helpers.go new file mode 100644 index 00000000..c8d063dc --- /dev/null +++ b/internal/testutils/helpers.go @@ -0,0 +1,465 @@ +package testutils + +import ( + "fmt" + "os" +) + +var ( + CdnCustomEndpoint = os.Getenv("TF_ACC_CDN_CUSTOM_ENDPOINT") + DnsCustomEndpoint = os.Getenv("TF_ACC_DNS_CUSTOM_ENDPOINT") + GitCustomEndpoint = os.Getenv("TF_ACC_GIT_CUSTOM_ENDPOINT") + IaaSCustomEndpoint = os.Getenv("TF_ACC_IAAS_CUSTOM_ENDPOINT") + KMSCustomEndpoint = os.Getenv("TF_ACC_KMS_CUSTOM_ENDPOINT") + LoadBalancerCustomEndpoint = os.Getenv("TF_ACC_LOADBALANCER_CUSTOM_ENDPOINT") + LogMeCustomEndpoint = os.Getenv("TF_ACC_LOGME_CUSTOM_ENDPOINT") + MariaDBCustomEndpoint = os.Getenv("TF_ACC_MARIADB_CUSTOM_ENDPOINT") + ModelServingCustomEndpoint = os.Getenv("TF_ACC_MODELSERVING_CUSTOM_ENDPOINT") + AuthorizationCustomEndpoint = os.Getenv("TF_ACC_authorization_custom_endpoint") + MongoDBFlexCustomEndpoint = os.Getenv("TF_ACC_MONGODBFLEX_CUSTOM_ENDPOINT") + OpenSearchCustomEndpoint = os.Getenv("TF_ACC_OPENSEARCH_CUSTOM_ENDPOINT") + ObservabilityCustomEndpoint = os.Getenv("TF_ACC_OBSERVABILITY_CUSTOM_ENDPOINT") + ObjectStorageCustomEndpoint = os.Getenv("TF_ACC_OBJECTSTORAGE_CUSTOM_ENDPOINT") + PostgresFlexCustomEndpoint = os.Getenv("TF_ACC_POSTGRESFLEX_CUSTOM_ENDPOINT") + RabbitMQCustomEndpoint = os.Getenv("TF_ACC_RABBITMQ_CUSTOM_ENDPOINT") + RedisCustomEndpoint = os.Getenv("TF_ACC_REDIS_CUSTOM_ENDPOINT") + ResourceManagerCustomEndpoint = os.Getenv("TF_ACC_RESOURCEMANAGER_CUSTOM_ENDPOINT") + ScfCustomEndpoint = os.Getenv("TF_ACC_SCF_CUSTOM_ENDPOINT") + SecretsManagerCustomEndpoint = os.Getenv("TF_ACC_SECRETSMANAGER_CUSTOM_ENDPOINT") + SQLServerFlexCustomEndpoint = os.Getenv("TF_ACC_SQLSERVERFLEX_CUSTOM_ENDPOINT") + ServerBackupCustomEndpoint = os.Getenv("TF_ACC_SERVER_BACKUP_CUSTOM_ENDPOINT") + ServerUpdateCustomEndpoint = os.Getenv("TF_ACC_SERVER_UPDATE_CUSTOM_ENDPOINT") + ServiceAccountCustomEndpoint = os.Getenv("TF_ACC_SERVICE_ACCOUNT_CUSTOM_ENDPOINT") + SKECustomEndpoint = os.Getenv("TF_ACC_SKE_CUSTOM_ENDPOINT") +) + +func ObservabilityProviderConfig() string { + if ObservabilityCustomEndpoint == "" { + return `provider "stackitprivatepreview" { + default_region = "eu01" + }` + } + return fmt.Sprintf(` + provider "stackitprivatepreview" { + observability_custom_endpoint = "%s" + }`, + ObservabilityCustomEndpoint, + ) +} + +func CdnProviderConfig() string { + if CdnCustomEndpoint == "" { + return ` + provider "stackitprivatepreview" { + enable_beta_resources = true + }` + } + return fmt.Sprintf(` + provider "stackitprivatepreview" { + cdn_custom_endpoint = "%s" + enable_beta_resources = true + }`, + CdnCustomEndpoint, + ) +} + +func DnsProviderConfig() string { + if DnsCustomEndpoint == "" { + return `provider "stackitprivatepreview" {}` + } + return fmt.Sprintf(` + provider "stackitprivatepreview" { + dns_custom_endpoint = "%s" + }`, + DnsCustomEndpoint, + ) +} + +func IaaSProviderConfig() string { + if IaaSCustomEndpoint == "" { + return ` + provider "stackitprivatepreview" { + default_region = "eu01" + }` + } + return fmt.Sprintf(` + provider "stackitprivatepreview" { + iaas_custom_endpoint = "%s" + }`, + IaaSCustomEndpoint, + ) +} + +func IaaSProviderConfigWithBetaResourcesEnabled() string { + if IaaSCustomEndpoint == "" { + return ` + provider "stackitprivatepreview" { + enable_beta_resources = true + default_region = "eu01" + }` + } + return fmt.Sprintf(` + provider "stackitprivatepreview" { + enable_beta_resources = true + iaas_custom_endpoint = "%s" + }`, + IaaSCustomEndpoint, + ) +} + +func IaaSProviderConfigWithExperiments() string { + if IaaSCustomEndpoint == "" { + return ` + provider "stackitprivatepreview" { + default_region = "eu01" + experiments = [ "routing-tables", "network" ] + }` + } + return fmt.Sprintf(` + provider "stackitprivatepreview" { + iaas_custom_endpoint = "%s" + experiments = [ "routing-tables", "network" ] + }`, + IaaSCustomEndpoint, + ) +} + +func KMSProviderConfig() string { + if KMSCustomEndpoint == "" { + return ` + provider "stackitprivatepreview" { + default_region = "eu01" + }` + } + return fmt.Sprintf(` + provider "stackitprivatepreview" { + kms_custom_endpoint = "%s" + }`, + KMSCustomEndpoint, + ) +} + +func LoadBalancerProviderConfig() string { + if LoadBalancerCustomEndpoint == "" { + return ` + provider "stackitprivatepreview" { + default_region = "eu01" + }` + } + return fmt.Sprintf(` + provider "stackitprivatepreview" { + loadbalancer_custom_endpoint = "%s" + }`, + LoadBalancerCustomEndpoint, + ) +} + +func LogMeProviderConfig() string { + if LogMeCustomEndpoint == "" { + return ` + provider "stackitprivatepreview" { + default_region = "eu01" + }` + } + return fmt.Sprintf(` + provider "stackitprivatepreview" { + logme_custom_endpoint = "%s" + }`, + LogMeCustomEndpoint, + ) +} + +func MariaDBProviderConfig() string { + if MariaDBCustomEndpoint == "" { + return ` + provider "stackitprivatepreview" { + default_region = "eu01" + }` + } + return fmt.Sprintf(` + provider "stackitprivatepreview" { + mariadb_custom_endpoint = "%s" + }`, + MariaDBCustomEndpoint, + ) +} + +func ModelServingProviderConfig() string { + if ModelServingCustomEndpoint == "" { + return ` + provider "stackitprivatepreview" { + default_region = "eu01" + } + ` + } + return fmt.Sprintf(` + provider "stackitprivatepreview" { + modelserving_custom_endpoint = "%s" + }`, + ModelServingCustomEndpoint, + ) +} + +func MongoDBFlexProviderConfig() string { + if MongoDBFlexCustomEndpoint == "" { + return ` + provider "stackitprivatepreview" { + default_region = "eu01" + }` + } + return fmt.Sprintf(` + provider "stackitprivatepreview" { + mongodbflex_custom_endpoint = "%s" + }`, + MongoDBFlexCustomEndpoint, + ) +} + +func ObjectStorageProviderConfig() string { + if ObjectStorageCustomEndpoint == "" { + return ` + provider "stackitprivatepreview" { + default_region = "eu01" + }` + } + return fmt.Sprintf(` + provider "stackitprivatepreview" { + objectstorage_custom_endpoint = "%s" + }`, + ObjectStorageCustomEndpoint, + ) +} + +func OpenSearchProviderConfig() string { + if OpenSearchCustomEndpoint == "" { + return ` + provider "stackitprivatepreview" { + default_region = "eu01" + }` + } + return fmt.Sprintf(` + provider "stackitprivatepreview" { + opensearch_custom_endpoint = "%s" + }`, + OpenSearchCustomEndpoint, + ) +} + +func PostgresFlexProviderConfig(saFile string) string { + if PostgresFlexCustomEndpoint == "" { + return fmt.Sprintf(` + provider "stackitprivatepreview" { + default_region = "eu01" + service_account_key_path = "%s" + }`, saFile) + } + return fmt.Sprintf(` + provider "stackitprivatepreview" { + service_account_key_path = "%s" + postgresflex_custom_endpoint = "%s" + }`, + saFile, + PostgresFlexCustomEndpoint, + ) +} + +func RabbitMQProviderConfig() string { + if RabbitMQCustomEndpoint == "" { + return ` + provider "stackitprivatepreview" { + default_region = "eu01" + }` + } + return fmt.Sprintf(` + provider "stackitprivatepreview" { + rabbitmq_custom_endpoint = "%s" + }`, + RabbitMQCustomEndpoint, + ) +} + +func RedisProviderConfig() string { + if RedisCustomEndpoint == "" { + return ` + provider "stackitprivatepreview" { + default_region = "eu01" + }` + } + return fmt.Sprintf(` + provider "stackitprivatepreview" { + redis_custom_endpoint = "%s" + }`, + RedisCustomEndpoint, + ) +} + +func ResourceManagerProviderConfig() string { + key := GetTestProjectServiceAccountJson("") + if ResourceManagerCustomEndpoint == "" || AuthorizationCustomEndpoint == "" { + return fmt.Sprintf(` + provider "stackitprivatepreview" { + service_account_key = "%s" + }`, + key, + ) + } + return fmt.Sprintf(` + provider "stackitprivatepreview" { + resourcemanager_custom_endpoint = "%s" + authorization_custom_endpoint = "%s" + service_account_token = "%s" + }`, + ResourceManagerCustomEndpoint, + AuthorizationCustomEndpoint, + key, + ) +} + +func SecretsManagerProviderConfig() string { + if SecretsManagerCustomEndpoint == "" { + return ` + provider "stackitprivatepreview" { + default_region = "eu01" + }` + } + return fmt.Sprintf(` + provider "stackitprivatepreview" { + secretsmanager_custom_endpoint = "%s" + }`, + SecretsManagerCustomEndpoint, + ) +} + +func SQLServerFlexProviderConfig(saFile string) string { + if SQLServerFlexCustomEndpoint == "" { + return fmt.Sprintf(` + provider "stackitprivatepreview" { + default_region = "eu01" + service_account_key_path = "%s" + }`, saFile) + } + return fmt.Sprintf(` + provider "stackitprivatepreview" { + service_account_key_path = "%s" + sqlserverflex_custom_endpoint = "%s" + }`, + saFile, + SQLServerFlexCustomEndpoint, + ) +} + +func ServerBackupProviderConfig() string { + if ServerBackupCustomEndpoint == "" { + return ` + provider "stackitprivatepreview" { + default_region = "eu01" + enable_beta_resources = true + }` + } + return fmt.Sprintf(` + provider "stackitprivatepreview" { + server_backup_custom_endpoint = "%s" + enable_beta_resources = true + }`, + ServerBackupCustomEndpoint, + ) +} + +func ServerUpdateProviderConfig() string { + if ServerUpdateCustomEndpoint == "" { + return ` + provider "stackitprivatepreview" { + default_region = "eu01" + enable_beta_resources = true + }` + } + return fmt.Sprintf(` + provider "stackitprivatepreview" { + server_update_custom_endpoint = "%s" + enable_beta_resources = true + }`, + ServerUpdateCustomEndpoint, + ) +} + +func SKEProviderConfig() string { + if SKECustomEndpoint == "" { + return ` + provider "stackitprivatepreview" { + default_region = "eu01" + }` + } + return fmt.Sprintf(` + provider "stackitprivatepreview" { + ske_custom_endpoint = "%s" + }`, + SKECustomEndpoint, + ) +} + +func AuthorizationProviderConfig() string { + if AuthorizationCustomEndpoint == "" { + return ` + provider "stackitprivatepreview" { + default_region = "eu01" + experiments = ["iam"] + }` + } + return fmt.Sprintf(` + provider "stackitprivatepreview" { + authorization_custom_endpoint = "%s" + experiments = ["iam"] + }`, + AuthorizationCustomEndpoint, + ) +} + +func ServiceAccountProviderConfig() string { + if ServiceAccountCustomEndpoint == "" { + return ` + provider "stackitprivatepreview" { + default_region = "eu01" + enable_beta_resources = true + }` + } + return fmt.Sprintf(` + provider "stackitprivatepreview" { + service_account_custom_endpoint = "%s" + enable_beta_resources = true + }`, + ServiceAccountCustomEndpoint, + ) +} + +func GitProviderConfig() string { + if GitCustomEndpoint == "" { + return ` + provider "stackitprivatepreview" { + default_region = "eu01" + enable_beta_resources = true + }` + } + return fmt.Sprintf(` + provider "stackitprivatepreview" { + git_custom_endpoint = "%s" + enable_beta_resources = true + }`, + GitCustomEndpoint, + ) +} + +func ScfProviderConfig() string { + if ScfCustomEndpoint == "" { + return ` + provider "stackitprivatepreview" { + default_region = "eu01" + }` + } + return fmt.Sprintf(` + provider "stackitprivatepreview" { + default_region = "eu01" + scf_custom_endpoint = "%s" + }`, + ScfCustomEndpoint, + ) +} diff --git a/internal/testutils/testutils.go b/internal/testutils/testutils.go new file mode 100644 index 00000000..c35c332c --- /dev/null +++ b/internal/testutils/testutils.go @@ -0,0 +1,219 @@ +package testutils + +import ( + "fmt" + "log" + "log/slog" + "os" + "os/exec" + "path/filepath" + "strings" + "time" + + "github.com/hashicorp/terraform-plugin-framework/providerserver" + "github.com/hashicorp/terraform-plugin-go/tfprotov6" + "github.com/hashicorp/terraform-plugin-testing/config" + "github.com/hashicorp/terraform-plugin-testing/echoprovider" + "github.com/joho/godotenv" + + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit" +) + +const ( + // Default location of credentials JSON + // credentialsFilePath = ".stackit/credentials.json" //nolint:gosec // linter false positive + serviceAccountFilePath = ".stackit/service_account.json" +) + +var ( + // TestAccProtoV6ProviderFactories is used to instantiate a provider during + // acceptance testing. The factory function will be invoked for every Terraform + // CLI command executed to create a provider server to which the CLI can + // reattach. + TestAccProtoV6ProviderFactories = map[string]func() (tfprotov6.ProviderServer, error){ + "stackitprivatepreview": providerserver.NewProtocol6WithError(stackit.New("test-version")()), + } + + // TestEphemeralAccProtoV6ProviderFactories is used to instantiate a provider during + // acceptance testing. The factory function will be invoked for every Terraform + // CLI command executed to create a provider server to which the CLI can + // reattach. + // + // See the Terraform acceptance test documentation on ephemeral resources for more information: + // https://developer.hashicorp.com/terraform/plugin/testing/acceptance-tests/ephemeral-resources + TestEphemeralAccProtoV6ProviderFactories = map[string]func() (tfprotov6.ProviderServer, error){ + "stackitprivatepreview": providerserver.NewProtocol6WithError(stackit.New("test-version")()), + "echo": echoprovider.NewProviderServer(), + } + + // E2ETestsEnabled checks if end-to-end tests should be run. + // It is enabled when the TF_ACC environment variable is set to "1". + E2ETestsEnabled = os.Getenv("TF_ACC") == "1" + // OrganizationId is the id of organization used for tests + OrganizationId = os.Getenv("TF_ACC_ORGANIZATION_ID") + // ProjectId is the id of project used for tests + ProjectId = os.Getenv("TF_ACC_PROJECT_ID") + Region = os.Getenv("TF_ACC_REGION") + // ServiceAccountFile is the json file of the service account + ServiceAccountFile = os.Getenv("TF_ACC_SERVICE_ACCOUNT_FILE") + // ServerId is the id of a server used for some tests + ServerId = getenv("TF_ACC_SERVER_ID", "") + // TestProjectParentContainerID is the container id of the parent resource under which projects are created as part of the resource-manager acceptance tests + TestProjectParentContainerID = os.Getenv("TF_ACC_TEST_PROJECT_PARENT_CONTAINER_ID") + // TestProjectParentUUID is the uuid of the parent resource under which projects are created as part of the resource-manager acceptance tests + TestProjectParentUUID = os.Getenv("TF_ACC_TEST_PROJECT_PARENT_UUID") + // TestProjectServiceAccountEmail is the e-mail of a service account with admin permissions on the organization under which projects are created as part of the resource-manager acceptance tests + TestProjectServiceAccountEmail = os.Getenv("TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_EMAIL") + // TestProjectUserEmail is the e-mail of a user for the project created as part of the resource-manager acceptance tests + // Default email: acc-test@sa.stackit.cloud + TestProjectUserEmail = getenv("TF_ACC_TEST_PROJECT_USER_EMAIL", "acc-test@sa.stackit.cloud") + // TestImageLocalFilePath is the local path to an image file used for image acceptance tests + TestImageLocalFilePath = getenv("TF_ACC_TEST_IMAGE_LOCAL_FILE_PATH", "default") +) + +func Setup() { + root, err := getRoot() + if err != nil { + log.Fatalln(err) + } + err = godotenv.Load(fmt.Sprintf("%s/.env", *root)) + if err != nil { + slog.Info("could not find .env file - not loading .env") + return + } + slog.Info("loaded .env file", "path", *root) +} + +func getRoot() (*string, error) { + cmd := exec.Command("git", "rev-parse", "--show-toplevel") + out, err := cmd.Output() + if err != nil { + return nil, err + } + lines := strings.Split(string(out), "\n") + return &lines[0], nil +} + +func ResourceNameWithDateTime(name string) string { + dateTime := time.Now().Format(time.RFC3339) + // Remove timezone to have a smaller datetime + dateTimeTrimmed, _, _ := strings.Cut(dateTime, "+") + return fmt.Sprintf("tf-acc-%s-%s", name, dateTimeTrimmed) +} + +func GetTestProjectServiceAccountJson(path string) string { + var err error + token, tokenSet := os.LookupEnv("TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_JSON") + if !tokenSet || token == "" { + token, err = readTestServiceAccountJsonFromFile(path) + if err != nil { + return "" + } + } + return token +} + +//func GetTestProjectServiceAccountToken(path string) string { +// var err error +// token, tokenSet := os.LookupEnv("TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_TOKEN") +// if !tokenSet || token == "" { +// token, err = readTestTokenFromCredentialsFile(path) +// if err != nil { +// return "" +// } +// } +// return token +//} +// +//func readTestTokenFromCredentialsFile(path string) (string, error) { +// if path == "" { +// customPath, customPathSet := os.LookupEnv("STACKIT_CREDENTIALS_PATH") +// if !customPathSet || customPath == "" { +// path = credentialsFilePath +// home, err := os.UserHomeDir() +// if err != nil { +// return "", fmt.Errorf("getting home directory: %w", err) +// } +// path = filepath.Join(home, path) +// } else { +// path = customPath +// } +// } +// +// credentialsRaw, err := os.ReadFile(path) +// if err != nil { +// return "", fmt.Errorf("opening file: %w", err) +// } +// +// var credentials struct { +// TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_TOKEN string `json:"TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_TOKEN"` +// } +// err = json.Unmarshal(credentialsRaw, &credentials) +// if err != nil { +// return "", fmt.Errorf("unmarshalling credentials: %w", err) +// } +// return credentials.TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_TOKEN, nil +//} + +func readTestServiceAccountJsonFromFile(path string) (string, error) { + if path == "" { + customPath, customPathSet := os.LookupEnv("STACKIT_SERVICE_ACCOUNT_PATH") + if !customPathSet || customPath == "" { + path = serviceAccountFilePath + home, err := os.UserHomeDir() + if err != nil { + return "", fmt.Errorf("getting home directory: %w", err) + } + path = filepath.Join(home, path) + } else { + path = customPath + } + } + + credentialsRaw, err := os.ReadFile(path) + if err != nil { + return "", fmt.Errorf("opening file: %w", err) + } + return string(credentialsRaw), nil +} + +func getenv(key, defaultValue string) string { + val := os.Getenv(key) + if val == "" { + return defaultValue + } + return val +} + +// CreateDefaultLocalFile is a helper for local_file_path. No real data is created +func CreateDefaultLocalFile() os.File { + // Define the file name and size + fileName := "test-512k.img" + size := 512 * 1024 // 512 KB + + // Create the file + file, err := os.Create(fileName) + if err != nil { + panic(err) + } + + // Seek to the desired position (512 KB) + _, err = file.Seek(int64(size), 0) + if err != nil { + panic(err) + } + + return *file +} + +func ConvertConfigVariable(variable config.Variable) string { + tmpByteArray, _ := variable.MarshalJSON() + // In case the variable is a string, the quotes should be removed + if tmpByteArray[0] == '"' && tmpByteArray[len(tmpByteArray)-1] == '"' { + result := string(tmpByteArray[1 : len(tmpByteArray)-1]) + // Replace escaped quotes which where added MarshalJSON + rawString := strings.ReplaceAll(result, `\"`, `"`) + return rawString + } + return string(tmpByteArray) +} diff --git a/internal/testutils/testutils_test.go b/internal/testutils/testutils_test.go new file mode 100644 index 00000000..4e18bd1e --- /dev/null +++ b/internal/testutils/testutils_test.go @@ -0,0 +1,48 @@ +package testutils + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-testing/config" +) + +func TestConvertConfigVariable(t *testing.T) { + tests := []struct { + name string + variable config.Variable + want string + }{ + { + name: "string", + variable: config.StringVariable("test"), + want: "test", + }, + { + name: "bool: true", + variable: config.BoolVariable(true), + want: "true", + }, + { + name: "bool: false", + variable: config.BoolVariable(false), + want: "false", + }, + { + name: "integer", + variable: config.IntegerVariable(10), + want: "10", + }, + { + name: "quoted string", + variable: config.StringVariable(`instance =~ ".*"`), + want: `instance =~ ".*"`, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if got := ConvertConfigVariable(tt.variable); got != tt.want { + t.Errorf("ConvertConfigVariable() = %v, want %v", got, tt.want) + } + }) + } +} diff --git a/stackit/internal/services/postgresflexalpha/instance/resource_test.go b/stackit/internal/services/postgresflexalpha/instance/resource_test.go deleted file mode 100644 index c84c1a5d..00000000 --- a/stackit/internal/services/postgresflexalpha/instance/resource_test.go +++ /dev/null @@ -1,280 +0,0 @@ -package postgresflexalpha - -import ( - "encoding/json" - "fmt" - "net/http" - "net/http/httptest" - "os" - "testing" - - "github.com/hashicorp/terraform-plugin-testing/helper/acctest" - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-plugin-testing/terraform" - "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/testutil" -) - -var ( - validFlavor = "2.4" - kekKeyRingId = "" - kekKeyVersion = "" - kekKeySA = "" -) - -func testAccPreCheck(t *testing.T) { - // TODO: if needed ... -} - -//func TestAccResourceExample_parallel(t *testing.T) { -// t.Parallel() -// -// exData := resData{ -// Region: "eu01", -// ServiceAccountFilePath: sa_file, -// ProjectID: project_id, -// Name: acctest.RandomWithPrefix("tf-acc"), -// } -// -// resource.Test(t, resource.TestCase{ -// ProtoV6ProviderFactories: testutil.TestAccProtoV6ProviderFactories, -// Steps: []resource.TestStep{ -// { -// Config: testAccResourceEncryptionExampleConfig(exData), -// Check: resource.TestCheckResourceAttrSet("example_resource.test", "id"), -// }, -// }, -// }) -//} - -type resData struct { - ServiceAccountFilePath string - ProjectID string - Region string - Name string - Flavor string - BackupSchedule string - RetentionDays uint32 -} - -func getExample() resData { - return resData{ - Region: testutil.Region, - ServiceAccountFilePath: testutil.ServiceAccountFile, - ProjectID: testutil.ProjectId, - Name: acctest.RandomWithPrefix("tf-acc"), - Flavor: "2.4", - BackupSchedule: "0 0 * * *", - RetentionDays: 33, - } -} - -func TestAccResourceExample_basic(t *testing.T) { - exData := getExample() - - resource.Test(t, resource.TestCase{ - PreCheck: func() { testAccPreCheck(t) }, - ProtoV6ProviderFactories: testutil.TestAccProtoV6ProviderFactories, - Steps: []resource.TestStep{ - { - Config: testAccResourceNoEncryptionExampleConfig(exData), - Check: resource.ComposeAggregateTestCheckFunc( - resource.TestCheckResourceAttr("example_resource.test", "name", exData.Name), - resource.TestCheckResourceAttrSet("example_resource.test", "id"), - ), - }, - //// Create and verify - //{ - // Config: testAccResourceNoEncryptionExampleConfig(exData), - // Check: resource.ComposeTestCheckFunc( - // resource.TestCheckResourceAttr("example_resource.test", "name", exData.Name), - // ), - //}, - //// Update and verify - //{ - // Config: testAccResourceNoEncryptionExampleConfig(exData), - // Check: resource.ComposeTestCheckFunc( - // resource.TestCheckResourceAttr("example_resource.test", "name", exData.Name), - // ), - //}, - // Import test - { - ResourceName: "example_resource.test", - ImportState: true, - ImportStateVerify: true, - }, - }, - }) -} - -func testAccResourceNoEncryptionExampleConfig(data resData) string { - return fmt.Sprintf(` - -%[1]s - -resource "stackitprivatepreview_postgresflexalpha_instance" "test" { - project_id = %[2]q - name = %[3]q - backup_schedule = %[4]q - retention_days = %[5]d - flavor_id = %[6]q - replicas = 1 - storage = { - performance_class = "premium-perf2-stackit" - size = 10 - } - network = { - acl = ["0.0.0.0/0"] - access_scope = "PUBLIC" - } - version = 17 -} - -`, - testutil.PostgresFlexProviderConfig(data.ServiceAccountFilePath), - data.ProjectID, - data.Name, - data.BackupSchedule, - data.RetentionDays, - data.Flavor, - data.Name, - ) -} - -func testAccResourceEncryptionExampleConfig(data resData) string { - return fmt.Sprintf(` - -%[1]s - -resource "stackitprivatepreview_postgresflexalpha_instance" "test" { - project_id = %[2]q - name = %[3]q - backup_schedule = "0 0 * * *" - retention_days = 45 - flavor_id = "2.1" - replicas = 1 - storage = { - performance_class = "premium-perf2-stackit" - size = 10 - } - encryption = { - kek_key_id = "key01" - kek_key_ring_id = "key_ring_01" - kek_key_version = 1 - service_account = "service@account.email" - } - network = { - acl = ["0.0.0.0/0"] - access_scope = "PUBLIC" - } - version = 14 -} - -`, - testutil.PostgresFlexProviderConfig(data.ServiceAccountFilePath), - data.ProjectID, - data.Name, - ) -} - -func testCheckResourceExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("resource not found: %s", resourceName) - } - - if rs.Primary.ID == "" { - return fmt.Errorf("resource ID not set") - } - - // Verify resource exists in the API - //client := testAccProvider.Meta().(*APIClient) - //_, err := client.GetResource(rs.Primary.ID) - //if err != nil { - // return fmt.Errorf("error fetching resource: %w", err) - //} - - return nil - } -} - -func setupMockServer() *httptest.Server { - mux := http.NewServeMux() - - mux.HandleFunc("/api/resources", func(w http.ResponseWriter, r *http.Request) { - switch r.Method { - case http.MethodPost: - w.WriteHeader(http.StatusCreated) - json.NewEncoder(w).Encode(map[string]string{ - "id": "mock-id-123", - "name": "test-resource", - }) - case http.MethodGet: - w.WriteHeader(http.StatusOK) - json.NewEncoder(w).Encode([]map[string]string{}) - } - }) - - return httptest.NewServer(mux) -} - -func TestUnitResourceCreate(t *testing.T) { - server := setupMockServer() - defer server.Close() - - // Configure provider to use mock server URL - os.Setenv("API_ENDPOINT", server.URL) - - // Run unit tests against mock -} - -// type postgresFlexClientMocked struct { -// returnError bool -// getFlavorsResp *postgresflex.GetFlavorsResponse -// } -// -// func (c *postgresFlexClientMocked) ListFlavorsExecute(_ context.Context, _, _ string) (*postgresflex.GetFlavorsResponse, error) { -// if c.returnError { -// return nil, fmt.Errorf("get flavors failed") -// } -// -// return c.getFlavorsResp, nil -// } - -//func TestNewInstanceResource(t *testing.T) { -// exData := resData{ -// Region: "eu01", -// ServiceAccountFilePath: sa_file, -// ProjectID: project_id, -// Name: "testRes", -// } -// resource.Test(t, resource.TestCase{ -// ProtoV6ProviderFactories: testutil.TestAccProtoV6ProviderFactories, -// Steps: []resource.TestStep{ -// { -// Config: testAccResourceEncryptionExampleConfig(exData), -// Check: resource.ComposeAggregateTestCheckFunc( -// resource.TestCheckResourceAttr("example_resource.test", "name", exData.Name), -// resource.TestCheckResourceAttrSet("example_resource.test", "id"), -// ), -// }, -// }, -// }) -// -// //tests := []struct { -// // name string -// // want resource.Resource -// //}{ -// // { -// // name: "create empty instance resource", -// // want: &instanceResource{}, -// // }, -// //} -// //for _, tt := range tests { -// // t.Run(tt.name, func(t *testing.T) { -// // if got := NewInstanceResource(); !reflect.DeepEqual(got, tt.want) { -// // t.Errorf("NewInstanceResource() = %v, want %v", got, tt.want) -// // } -// // }) -// //} -//} diff --git a/stackit/internal/services/postgresflexalpha/instance/schema_test.go b/stackit/internal/services/postgresflexalpha/instance/schema_test.go deleted file mode 100644 index ec567d75..00000000 --- a/stackit/internal/services/postgresflexalpha/instance/schema_test.go +++ /dev/null @@ -1,33 +0,0 @@ -package postgresflexalpha - -import ( - "context" - "testing" - - // The fwresource import alias is so there is no collision - // with the more typical acceptance testing import: - // "github.com/hashicorp/terraform-plugin-testing/helper/resource" - fwresource "github.com/hashicorp/terraform-plugin-framework/resource" -) - -func TestInstanceResourceSchema(t *testing.T) { - t.Parallel() - - ctx := context.Background() - schemaRequest := fwresource.SchemaRequest{} - schemaResponse := &fwresource.SchemaResponse{} - - // Instantiate the resource.Resource and call its Schema method - NewInstanceResource().Schema(ctx, schemaRequest, schemaResponse) - - if schemaResponse.Diagnostics.HasError() { - t.Fatalf("Schema method diagnostics: %+v", schemaResponse.Diagnostics) - } - - // Validate the schema - diagnostics := schemaResponse.Schema.ValidateImplementation(ctx) - - if diagnostics.HasError() { - t.Fatalf("Schema validation diagnostics: %+v", diagnostics) - } -} diff --git a/stackit/internal/services/postgresflexalpha/postgresflex_acc_test.go b/stackit/internal/services/postgresflexalpha/postgresflex_acc_test.go index fbc442e4..dba1a086 100644 --- a/stackit/internal/services/postgresflexalpha/postgresflex_acc_test.go +++ b/stackit/internal/services/postgresflexalpha/postgresflex_acc_test.go @@ -1,622 +1,862 @@ -// Copyright (c) STACKIT - package postgresflexalpha_test import ( "context" _ "embed" "fmt" - "log/slog" "os" - "strings" + "strconv" "testing" "github.com/hashicorp/terraform-plugin-testing/helper/acctest" "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-plugin-testing/terraform" - "github.com/joho/godotenv" - "github.com/stackitcloud/stackit-sdk-go/core/utils" + postgresflexalpha "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/instance" - "github.com/stackitcloud/stackit-sdk-go/core/config" - "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" - "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/testutil" - - postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/internal/testutils" + // The fwresource import alias is so there is no collision + // with the more typical acceptance testing import: + // "github.com/hashicorp/terraform-plugin-testing/helper/resource" + fwresource "github.com/hashicorp/terraform-plugin-framework/resource" ) -var ( - //go:embed testdata/resource-complete.tf - resourceSecurityGroupMinConfig string //nolint:unused // needs implementation -) +func TestInstanceResourceSchema(t *testing.T) { + t.Parallel() -func setup() { - err := godotenv.Load() - if err != nil { - slog.Info("could not find .env file - not loading .env") - return + ctx := context.Background() + schemaRequest := fwresource.SchemaRequest{} + schemaResponse := &fwresource.SchemaResponse{} + + // Instantiate the resource.Resource and call its Schema method + postgresflexalpha.NewInstanceResource().Schema(ctx, schemaRequest, schemaResponse) + + if schemaResponse.Diagnostics.HasError() { + t.Fatalf("Schema method diagnostics: %+v", schemaResponse.Diagnostics) + } + + // Validate the schema + diagnostics := schemaResponse.Schema.ValidateImplementation(ctx) + + if diagnostics.HasError() { + t.Fatalf("Schema validation diagnostics: %+v", diagnostics) } - slog.Info("loaded .env file") } +var ( + //go:embed testdata/resource-no-enc.tf + resourceConfigNoEnc string //nolint:unused // needs implementation + + //go:embed testdata/resource-enc.tf + resourceConfigEnc string //nolint:unused // needs implementation +) + func TestMain(m *testing.M) { - setup() + testutils.Setup() code := m.Run() // shutdown() os.Exit(code) } -// Instance resource data -var instanceResource = map[string]string{ - "project_id": testutil.ProjectId, - "region": "eu01", - "name": fmt.Sprintf("tf-acc-%s", acctest.RandStringFromCharSet(7, acctest.CharSetAlphaNum)), - "acl": "192.168.0.0/16", - "backup_schedule": "00 16 * * *", - "backup_schedule_updated": "00 12 * * *", - "retention_days": "33", - "flavor_cpu": "2", - "flavor_ram": "4", - "flavor_description": "Small, Compute optimized", - "replicas": "1", - "storage_class": "premium-perf12-stackit", - "storage_size": "5", - "version": "14", - "flavor_id": "2.4", - "kek_key_id": "UUID1", - "kek_key_ring_id": "UUID2", - "kek_key_version": "1", - "service_account": "service@account.com", - "access_scope": "SNA", +//var ( +// validFlavor = "2.4" +// kekKeyRingId = "" +// kekKeyVersion = "" +// kekKeySA = "" +//) + +func testAccPreCheck(t *testing.T) { + if _, ok := os.LookupEnv("TF_ACC_PROJECT_ID"); !ok { + t.Fatalf("could not find env var TF_ACC_PROJECT_ID") + } } -// User resource data -var userResource = map[string]string{ - "username": fmt.Sprintf("tfaccuser%s", acctest.RandStringFromCharSet(4, acctest.CharSetAlpha)), - "role": "createdb", - "project_id": testutil.ProjectId, +//func TestAccResourceExample_parallel(t *testing.T) { +// t.Parallel() +// +// exData := resData{ +// Region: "eu01", +// ServiceAccountFilePath: sa_file, +// ProjectID: project_id, +// Name: acctest.RandomWithPrefix("tf-acc"), +// } +// +// resource.Test(t, resource.TestCase{ +// ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories, +// Steps: []resource.TestStep{ +// { +// Config: testAccResourceEncryptionExampleConfig(exData), +// Check: resource.TestCheckResourceAttrSet("example_resource.test", "id"), +// }, +// }, +// }) +//} + +type resData struct { + ServiceAccountFilePath string + ProjectId string + Region string + Name string + TfName string + FlavorId string + BackupSchedule string + UseEncryption bool + KekKeyId string + KekKeyRingId string + KekKeyVersion uint8 + KekServiceAccount string + PerformanceClass string + Replicas uint32 + Size uint32 + AclString string + AccessScope string + RetentionDays uint32 + Version string } -// Database resource data -var databaseResource = map[string]string{ - "name": fmt.Sprintf("tfaccdb%s", acctest.RandStringFromCharSet(4, acctest.CharSetAlphaNum)), - "project_id": testutil.ProjectId, +func getExample() resData { + name := acctest.RandomWithPrefix("tf-acc") + return resData{ + Region: os.Getenv("TF_ACC_REGION"), + ServiceAccountFilePath: os.Getenv("TF_ACC_SERVICE_ACCOUNT_FILE"), + ProjectId: os.Getenv("TF_ACC_PROJECT_ID"), + Name: name, + TfName: name, + FlavorId: "2.4", + BackupSchedule: "0 0 * * *", + UseEncryption: false, + RetentionDays: 33, + Replicas: 1, + PerformanceClass: "premium-perf2-stackit", + Size: 10, + AclString: "0.0.0.0/0", + AccessScope: "PUBLIC", + Version: "17", + } } -func configResources(backupSchedule string, _ *string) string { - return fmt.Sprintf( - ` - %s - - - resource "stackitprivatepreview_postgresflexalpha_instance" "instance" { - project_id = "%s" - region = "%s" - name = "%s" - backup_schedule = "%s" - retention_days = %s - flavor_id = %s - replicas = %s - storage = { - performance_class = "%s" - size = %s - } - encryption = { - kek_key_id = "%s" - kek_key_ring_id = "%s" - kek_key_version = "%s" - service_account = "%s" - } - network = { - acl = ["%s"] - access_scope = "%s" - } - version = %s - } - - resource "stackitprivatepreview_postgresflexalpha_user" "user" { - project_id = "%s" - instance_id = stackitprivatepreview_postgresflexalpha_instance.instance.instance_id - username = "%s" - roles = ["%s"] - } - - resource "stackitprivatepreview_postgresflexalpha_database" "database" { - project_id = "%s" - instance_id = stackitprivatepreview_postgresflexalpha_instance.instance.instance_id - name = "%s" - owner = stackitprivatepreview_postgresflexalpha_user.user.username - } - `, - testutil.PostgresFlexProviderConfig( - utils.GetEnvOrDefault("TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_FILE", "~/service-account.json"), - ), - instanceResource["project_id"], - instanceResource["region"], - instanceResource["name"], - backupSchedule, - instanceResource["retention_days"], - instanceResource["flavor_id"], - instanceResource["replicas"], - instanceResource["storage_class"], - instanceResource["storage_size"], - instanceResource["kek_key_id"], - instanceResource["kek_key_ring_id"], - instanceResource["kek_key_version"], - instanceResource["service_account"], - instanceResource["acl"], - instanceResource["access_scope"], - instanceResource["version"], - - userResource["project_id"], - userResource["username"], - userResource["role"], - - databaseResource["project_id"], - databaseResource["name"], +func TestAccResourceExample_basic(t *testing.T) { + exData := getExample() + resName := fmt.Sprintf( + "stackitprivatepreview_postgresflexalpha_instance.%s", + exData.TfName, ) -} -func TestAccPostgresFlexFlexResource(t *testing.T) { - resource.Test( - t, resource.TestCase{ - ProtoV6ProviderFactories: testutil.TestAccProtoV6ProviderFactories, - CheckDestroy: testAccCheckPostgresFlexDestroy, - Steps: []resource.TestStep{ - // Creation - { - // testdata/ - // ConfigDirectory: config.TestNameDirectory(), + updNameData := exData + updNameData.Name = "name-updated" - // testdata// - // ConfigDirectory: config.TestStepDirectory(), - Config: configResources(instanceResource["backup_schedule"], &testutil.Region), - Check: resource.ComposeAggregateTestCheckFunc( - // Instance - resource.TestCheckResourceAttr( - "stackitprivatepreview_postgresflexalpha_instance.instance", - "project_id", - instanceResource["project_id"], - ), - resource.TestCheckResourceAttrSet( - "stackitprivatepreview_postgresflexalpha_instance.instance", - "instance_id", - ), - resource.TestCheckResourceAttr( - "stackitprivatepreview_postgresflexalpha_instance.instance", - "name", - instanceResource["name"], - ), - resource.TestCheckResourceAttr( - "stackitprivatepreview_postgresflexalpha_instance.instance", - "acl.#", - "1", - ), - resource.TestCheckResourceAttr( - "stackitprivatepreview_postgresflexalpha_instance.instance", - "acl.0", - instanceResource["acl"], - ), - resource.TestCheckResourceAttrSet( - "stackitprivatepreview_postgresflexalpha_instance.instance", - "flavor.id", - ), - resource.TestCheckResourceAttrSet( - "stackitprivatepreview_postgresflexalpha_instance.instance", - "flavor.description", - ), - resource.TestCheckResourceAttr( - "stackitprivatepreview_postgresflexalpha_instance.instance", - "backup_schedule", - instanceResource["backup_schedule"], - ), - resource.TestCheckResourceAttr( - "stackitprivatepreview_postgresflexalpha_instance.instance", - "flavor.cpu", - instanceResource["flavor_cpu"], - ), - resource.TestCheckResourceAttr( - "stackitprivatepreview_postgresflexalpha_instance.instance", - "flavor.ram", - instanceResource["flavor_ram"], - ), - resource.TestCheckResourceAttr( - "stackitprivatepreview_postgresflexalpha_instance.instance", - "replicas", - instanceResource["replicas"], - ), - resource.TestCheckResourceAttr( - "stackitprivatepreview_postgresflexalpha_instance.instance", - "storage.class", - instanceResource["storage_class"], - ), - resource.TestCheckResourceAttr( - "stackitprivatepreview_postgresflexalpha_instance.instance", - "storage.size", - instanceResource["storage_size"], - ), - resource.TestCheckResourceAttr( - "stackitprivatepreview_postgresflexalpha_instance.instance", - "version", - instanceResource["version"], - ), - resource.TestCheckResourceAttr( - "stackitprivatepreview_postgresflexalpha_instance.instance", - "region", - testutil.Region, - ), + updSizeData := exData + updSizeData.Size = 25 - // User - resource.TestCheckResourceAttrPair( - "stackitprivatepreview_postgresflexalpha_user.user", "project_id", - "stackitprivatepreview_postgresflexalpha_instance.instance", "project_id", - ), - resource.TestCheckResourceAttrPair( - "stackitprivatepreview_postgresflexalpha_user.user", "instance_id", - "stackitprivatepreview_postgresflexalpha_instance.instance", "instance_id", - ), - resource.TestCheckResourceAttrSet("stackitprivatepreview_postgresflexalpha_user.user", "user_id"), - resource.TestCheckResourceAttrSet("stackitprivatepreview_postgresflexalpha_user.user", "password"), - - // Database - resource.TestCheckResourceAttrPair( - "stackitprivatepreview_postgresflexalpha_database.database", "project_id", - "stackitprivatepreview_postgresflexalpha_instance.instance", "project_id", - ), - resource.TestCheckResourceAttrPair( - "stackitprivatepreview_postgresflexalpha_database.database", "instance_id", - "stackitprivatepreview_postgresflexalpha_instance.instance", "instance_id", - ), - resource.TestCheckResourceAttr( - "stackitprivatepreview_postgresflexalpha_database.database", - "name", - databaseResource["name"], - ), - resource.TestCheckResourceAttrPair( - "stackitprivatepreview_postgresflexalpha_database.database", "owner", - "stackitprivatepreview_postgresflexalpha_user.user", "username", - ), - ), - }, - // data source - { - Config: fmt.Sprintf( - ` - %s - - data "stackitprivatepreview_postgresflexalpha_instance" "instance" { - project_id = stackitprivatepreview_postgresflexalpha_instance.instance.project_id - instance_id = stackitprivatepreview_postgresflexalpha_instance.instance.instance_id - } - - data "stackitprivatepreview_postgresflexalpha_user" "user" { - project_id = stackitprivatepreview_postgresflexalpha_instance.instance.project_id - instance_id = stackitprivatepreview_postgresflexalpha_instance.instance.instance_id - user_id = stackitprivatepreview_postgresflexalpha_user.user.user_id - } - - data "stackitprivatepreview_postgresflexalpha_database" "database" { - project_id = stackitprivatepreview_postgresflexalpha_instance.instance.project_id - instance_id = stackitprivatepreview_postgresflexalpha_instance.instance.instance_id - database_id = stackitprivatepreview_postgresflexalpha_database.database.database_id - } - `, - configResources(instanceResource["backup_schedule"], nil), - ), - Check: resource.ComposeAggregateTestCheckFunc( - // Instance data - resource.TestCheckResourceAttr( - "data.stackitprivatepreview_postgresflexalpha_instance.instance", - "project_id", - instanceResource["project_id"], - ), - resource.TestCheckResourceAttr( - "data.stackitprivatepreview_postgresflexalpha_instance.instance", - "name", - instanceResource["name"], - ), - resource.TestCheckResourceAttrPair( - "data.stackitprivatepreview_postgresflexalpha_instance.instance", "project_id", - "stackitprivatepreview_postgresflexalpha_instance.instance", "project_id", - ), - resource.TestCheckResourceAttrPair( - "data.stackitprivatepreview_postgresflexalpha_instance.instance", "instance_id", - "stackitprivatepreview_postgresflexalpha_instance.instance", "instance_id", - ), - resource.TestCheckResourceAttrPair( - "data.stackitprivatepreview_postgresflexalpha_user.user", "instance_id", - "stackitprivatepreview_postgresflexalpha_user.user", "instance_id", - ), - - resource.TestCheckResourceAttr( - "data.stackitprivatepreview_postgresflexalpha_instance.instance", - "acl.#", - "1", - ), - resource.TestCheckResourceAttr( - "data.stackitprivatepreview_postgresflexalpha_instance.instance", - "acl.0", - instanceResource["acl"], - ), - resource.TestCheckResourceAttr( - "data.stackitprivatepreview_postgresflexalpha_instance.instance", - "backup_schedule", - instanceResource["backup_schedule"], - ), - resource.TestCheckResourceAttr( - "data.stackitprivatepreview_postgresflexalpha_instance.instance", - "flavor.id", - instanceResource["flavor_id"], - ), - resource.TestCheckResourceAttr( - "data.stackitprivatepreview_postgresflexalpha_instance.instance", - "flavor.description", - instanceResource["flavor_description"], - ), - resource.TestCheckResourceAttr( - "data.stackitprivatepreview_postgresflexalpha_instance.instance", - "flavor.cpu", - instanceResource["flavor_cpu"], - ), - resource.TestCheckResourceAttr( - "data.stackitprivatepreview_postgresflexalpha_instance.instance", - "flavor.ram", - instanceResource["flavor_ram"], - ), - resource.TestCheckResourceAttr( - "data.stackitprivatepreview_postgresflexalpha_instance.instance", - "replicas", - instanceResource["replicas"], - ), - - // User data - resource.TestCheckResourceAttr( - "data.stackitprivatepreview_postgresflexalpha_user.user", - "project_id", - userResource["project_id"], - ), - resource.TestCheckResourceAttrSet( - "data.stackitprivatepreview_postgresflexalpha_user.user", - "user_id", - ), - resource.TestCheckResourceAttr( - "data.stackitprivatepreview_postgresflexalpha_user.user", - "username", - userResource["username"], - ), - resource.TestCheckResourceAttr( - "data.stackitprivatepreview_postgresflexalpha_user.user", - "roles.#", - "1", - ), - resource.TestCheckResourceAttr( - "data.stackitprivatepreview_postgresflexalpha_user.user", - "roles.0", - userResource["role"], - ), - resource.TestCheckResourceAttrSet( - "data.stackitprivatepreview_postgresflexalpha_user.user", - "host", - ), - resource.TestCheckResourceAttrSet( - "data.stackitprivatepreview_postgresflexalpha_user.user", - "port", - ), - - // Database data - resource.TestCheckResourceAttr( - "data.stackitprivatepreview_postgresflexalpha_database.database", - "project_id", - instanceResource["project_id"], - ), - resource.TestCheckResourceAttr( - "data.stackitprivatepreview_postgresflexalpha_database.database", - "name", - databaseResource["name"], - ), - resource.TestCheckResourceAttrPair( - "data.stackitprivatepreview_postgresflexalpha_database.database", - "instance_id", - "stackitprivatepreview_postgresflexalpha_instance.instance", - "instance_id", - ), - resource.TestCheckResourceAttrPair( - "data.stackitprivatepreview_postgresflexalpha_database.database", - "owner", - "data.stackitprivatepreview_postgresflexalpha_user.user", - "username", - ), - ), - }, - // Import - { - ResourceName: "stackitprivatepreview_postgresflexalpha_instance.instance", - ImportStateIdFunc: func(s *terraform.State) (string, error) { - r, ok := s.RootModule().Resources["stackitprivatepreview_postgresflexalpha_instance.instance"] - if !ok { - return "", fmt.Errorf("couldn't find resource stackitprivatepreview_postgresflexalpha_instance.instance") - } - instanceId, ok := r.Primary.Attributes["instance_id"] - if !ok { - return "", fmt.Errorf("couldn't find attribute instance_id") - } - - return fmt.Sprintf("%s,%s,%s", testutil.ProjectId, testutil.Region, instanceId), nil - }, - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"password"}, - }, - { - ResourceName: "stackitprivatepreview_postgresflexalpha_user.user", - ImportStateIdFunc: func(s *terraform.State) (string, error) { - r, ok := s.RootModule().Resources["stackitprivatepreview_postgresflexalpha_user.user"] - if !ok { - return "", fmt.Errorf("couldn't find resource stackitprivatepreview_postgresflexalpha_user.user") - } - instanceId, ok := r.Primary.Attributes["instance_id"] - if !ok { - return "", fmt.Errorf("couldn't find attribute instance_id") - } - userId, ok := r.Primary.Attributes["user_id"] - if !ok { - return "", fmt.Errorf("couldn't find attribute user_id") - } - - return fmt.Sprintf("%s,%s,%s,%s", testutil.ProjectId, testutil.Region, instanceId, userId), nil - }, - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"password", "uri"}, - }, - { - ResourceName: "stackitprivatepreview_postgresflexalpha_database.database", - ImportStateIdFunc: func(s *terraform.State) (string, error) { - r, ok := s.RootModule().Resources["stackitprivatepreview_postgresflexalpha_database.database"] - if !ok { - return "", fmt.Errorf("couldn't find resource stackitprivatepreview_postgresflexalpha_database.database") - } - instanceId, ok := r.Primary.Attributes["instance_id"] - if !ok { - return "", fmt.Errorf("couldn't find attribute instance_id") - } - databaseId, ok := r.Primary.Attributes["database_id"] - if !ok { - return "", fmt.Errorf("couldn't find attribute database_id") - } - - return fmt.Sprintf( - "%s,%s,%s,%s", - testutil.ProjectId, - testutil.Region, - instanceId, - databaseId, - ), nil - }, - ImportState: true, - ImportStateVerify: true, - }, - // Update - { - Config: configResources(instanceResource["backup_schedule_updated"], nil), - Check: resource.ComposeAggregateTestCheckFunc( - // Instance data - resource.TestCheckResourceAttr( - "stackitprivatepreview_postgresflexalpha_instance.instance", - "project_id", - instanceResource["project_id"], - ), - resource.TestCheckResourceAttrSet( - "stackitprivatepreview_postgresflexalpha_instance.instance", - "instance_id", - ), - resource.TestCheckResourceAttr( - "stackitprivatepreview_postgresflexalpha_instance.instance", - "name", - instanceResource["name"], - ), - resource.TestCheckResourceAttr( - "stackitprivatepreview_postgresflexalpha_instance.instance", - "acl.#", - "1", - ), - resource.TestCheckResourceAttr( - "stackitprivatepreview_postgresflexalpha_instance.instance", - "acl.0", - instanceResource["acl"], - ), - resource.TestCheckResourceAttr( - "stackitprivatepreview_postgresflexalpha_instance.instance", - "backup_schedule", - instanceResource["backup_schedule_updated"], - ), - resource.TestCheckResourceAttrSet( - "stackitprivatepreview_postgresflexalpha_instance.instance", - "flavor.id", - ), - resource.TestCheckResourceAttrSet( - "stackitprivatepreview_postgresflexalpha_instance.instance", - "flavor.description", - ), - resource.TestCheckResourceAttr( - "stackitprivatepreview_postgresflexalpha_instance.instance", - "flavor.cpu", - instanceResource["flavor_cpu"], - ), - resource.TestCheckResourceAttr( - "stackitprivatepreview_postgresflexalpha_instance.instance", - "flavor.ram", - instanceResource["flavor_ram"], - ), - resource.TestCheckResourceAttr( - "stackitprivatepreview_postgresflexalpha_instance.instance", - "replicas", - instanceResource["replicas"], - ), - resource.TestCheckResourceAttr( - "stackitprivatepreview_postgresflexalpha_instance.instance", - "storage.class", - instanceResource["storage_class"], - ), - resource.TestCheckResourceAttr( - "stackitprivatepreview_postgresflexalpha_instance.instance", - "storage.size", - instanceResource["storage_size"], - ), - resource.TestCheckResourceAttr( - "stackitprivatepreview_postgresflexalpha_instance.instance", - "version", - instanceResource["version"], - ), - ), - }, - // Deletion is done by the framework implicitly + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories, + Steps: []resource.TestStep{ + // Create and verify + { + Config: testutils.StringFromTemplateMust( + "testdata/instance_template.gompl", + exData, + ), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr(resName, "name", exData.Name), + resource.TestCheckResourceAttrSet(resName, "id"), + ), }, + // Update name and verify + { + Config: testutils.StringFromTemplateMust( + "testdata/instance_template.gompl", + updNameData, + ), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr(resName, "name", updNameData.Name), + ), + }, + // Update size and verify + { + Config: testutils.StringFromTemplateMust( + "testdata/instance_template.gompl", + updSizeData, + ), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + resName, + "storage.size", + strconv.Itoa(int(updSizeData.Size)), + ), + ), + }, + //// Import test + //{ + // ResourceName: "example_resource.test", + // ImportState: true, + // ImportStateVerify: true, + //}, }, - ) + }) } -func testAccCheckPostgresFlexDestroy(s *terraform.State) error { - ctx := context.Background() - var client *postgresflex.APIClient - var err error - if testutil.PostgresFlexCustomEndpoint == "" { - client, err = postgresflex.NewAPIClient() - } else { - client, err = postgresflex.NewAPIClient( - config.WithEndpoint(testutil.PostgresFlexCustomEndpoint), - ) - } - if err != nil { - return fmt.Errorf("creating client: %w", err) - } +//func setupMockServer() *httptest.Server { +// mux := http.NewServeMux() +// +// mux.HandleFunc("/api/resources", func(w http.ResponseWriter, r *http.Request) { +// switch r.Method { +// case http.MethodPost: +// w.WriteHeader(http.StatusCreated) +// err := json.NewEncoder(w).Encode(map[string]string{ +// "id": "mock-id-123", +// "name": "test-resource", +// }) +// if err != nil { +// log.Fatalln(err) +// } +// case http.MethodGet: +// w.WriteHeader(http.StatusOK) +// err := json.NewEncoder(w).Encode([]map[string]string{}) +// if err != nil { +// log.Fatalln(err) +// } +// } +// }) +// +// return httptest.NewServer(mux) +//} +// +//func TestUnitResourceCreate(t *testing.T) { +// server := setupMockServer() +// defer server.Close() +// +// // Configure provider to use mock server URL +// err := os.Setenv("API_ENDPOINT", server.URL) +// if err != nil { +// log.Fatalln(err) +// } +// +// // Run unit tests against mock +//} - instancesToDestroy := []string{} - for _, rs := range s.RootModule().Resources { - if rs.Type != "stackitprivatepreview_postgresflexalpha_instance" { - continue - } - // instance terraform ID: = "[project_id],[region],[instance_id]" - instanceId := strings.Split(rs.Primary.ID, core.Separator)[2] - instancesToDestroy = append(instancesToDestroy, instanceId) - } +// type postgresFlexClientMocked struct { +// returnError bool +// getFlavorsResp *postgresflex.GetFlavorsResponse +// } +// +// func (c *postgresFlexClientMocked) ListFlavorsExecute(_ context.Context, _, _ string) (*postgresflex.GetFlavorsResponse, error) { +// if c.returnError { +// return nil, fmt.Errorf("get flavors failed") +// } +// +// return c.getFlavorsResp, nil +// } - instancesResp, err := client.ListInstancesRequest(ctx, testutil.ProjectId, testutil.Region).Execute() - if err != nil { - return fmt.Errorf("getting instancesResp: %w", err) - } +//func TestNewInstanceResource(t *testing.T) { +// exData := resData{ +// Region: "eu01", +// ServiceAccountFilePath: sa_file, +// ProjectID: project_id, +// Name: "testRes", +// } +// resource.Test(t, resource.TestCase{ +// ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories, +// Steps: []resource.TestStep{ +// { +// Config: testAccResourceEncryptionExampleConfig(exData), +// Check: resource.ComposeAggregateTestCheckFunc( +// resource.TestCheckResourceAttr("example_resource.test", "name", exData.Name), +// resource.TestCheckResourceAttrSet("example_resource.test", "id"), +// ), +// }, +// }, +// }) +// +// //tests := []struct { +// // name string +// // want resource.Resource +// //}{ +// // { +// // name: "create empty instance resource", +// // want: &instanceResource{}, +// // }, +// //} +// //for _, tt := range tests { +// // t.Run(tt.name, func(t *testing.T) { +// // if got := NewInstanceResource(); !reflect.DeepEqual(got, tt.want) { +// // t.Errorf("NewInstanceResource() = %v, want %v", got, tt.want) +// // } +// // }) +// //} +//} - items := *instancesResp.Instances - for i := range items { - if items[i].Id == nil { - continue - } - if utils.Contains(instancesToDestroy, *items[i].Id) { - // TODO @mhenselin - does force still exist? - err := client.DeleteInstanceRequestExecute(ctx, testutil.ProjectId, testutil.Region, *items[i].Id) - if err != nil { - return fmt.Errorf("deleting instance %s during CheckDestroy: %w", *items[i].Id, err) - } - } - } - return nil -} +//// Instance resource data +//var instanceResource = map[string]string{ +// "project_id": testutils.ProjectId, +// "region": "eu01", +// "name": fmt.Sprintf("tf-acc-%s", acctest.RandStringFromCharSet(7, acctest.CharSetAlphaNum)), +// "acl": "192.168.0.0/16", +// "backup_schedule": "00 16 * * *", +// "backup_schedule_updated": "00 12 * * *", +// "retention_days": "33", +// "flavor_cpu": "2", +// "flavor_ram": "4", +// "flavor_description": "Small, Compute optimized", +// "replicas": "1", +// "storage_class": "premium-perf12-stackit", +// "storage_size": "5", +// "version": "14", +// "flavor_id": "2.4", +// "kek_key_id": "UUID1", +// "kek_key_ring_id": "UUID2", +// "kek_key_version": "1", +// "service_account": "service@account.com", +// "access_scope": "SNA", +//} +// +//// User resource data +//var userResource = map[string]string{ +// "username": fmt.Sprintf("tfaccuser%s", acctest.RandStringFromCharSet(4, acctest.CharSetAlpha)), +// "role": "createdb", +// "project_id": testutils.ProjectId, +//} +// +//// Database resource data +//var databaseResource = map[string]string{ +// "name": fmt.Sprintf("tfaccdb%s", acctest.RandStringFromCharSet(4, acctest.CharSetAlphaNum)), +// "project_id": testutils.ProjectId, +//} +// +//func configResources(backupSchedule string, _ *string) string { +// return fmt.Sprintf( +// ` +// %s +// +// +// resource "stackitprivatepreview_postgresflexalpha_instance" "instance" { +// project_id = "%s" +// region = "%s" +// name = "%s" +// backup_schedule = "%s" +// retention_days = %s +// flavor_id = %s +// replicas = %s +// storage = { +// performance_class = "%s" +// size = %s +// } +// encryption = { +// kek_key_id = "%s" +// kek_key_ring_id = "%s" +// kek_key_version = "%s" +// service_account = "%s" +// } +// network = { +// acl = ["%s"] +// access_scope = "%s" +// } +// version = %s +// } +// +// resource "stackitprivatepreview_postgresflexalpha_user" "user" { +// project_id = "%s" +// instance_id = stackitprivatepreview_postgresflexalpha_instance.instance.instance_id +// username = "%s" +// roles = ["%s"] +// } +// +// resource "stackitprivatepreview_postgresflexalpha_database" "database" { +// project_id = "%s" +// instance_id = stackitprivatepreview_postgresflexalpha_instance.instance.instance_id +// name = "%s" +// owner = stackitprivatepreview_postgresflexalpha_user.user.username +// } +// `, +// testutils.PostgresFlexProviderConfig( +// utils.GetEnvOrDefault("TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_FILE", "~/service-account.json"), +// ), +// instanceResource["project_id"], +// instanceResource["region"], +// instanceResource["name"], +// backupSchedule, +// instanceResource["retention_days"], +// instanceResource["flavor_id"], +// instanceResource["replicas"], +// instanceResource["storage_class"], +// instanceResource["storage_size"], +// instanceResource["kek_key_id"], +// instanceResource["kek_key_ring_id"], +// instanceResource["kek_key_version"], +// instanceResource["service_account"], +// instanceResource["acl"], +// instanceResource["access_scope"], +// instanceResource["version"], +// +// userResource["project_id"], +// userResource["username"], +// userResource["role"], +// +// databaseResource["project_id"], +// databaseResource["name"], +// ) +//} +// +//func TestAccPostgresFlexFlexResource(t *testing.T) { +// resource.Test( +// t, resource.TestCase{ +// ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories, +// CheckDestroy: testAccCheckPostgresFlexDestroy, +// Steps: []resource.TestStep{ +// // Creation +// { +// // testdata/ +// // ConfigDirectory: config.TestNameDirectory(), +// +// // testdata// +// // ConfigDirectory: config.TestStepDirectory(), +// Config: configResources(instanceResource["backup_schedule"], &testutils.Region), +// Check: resource.ComposeAggregateTestCheckFunc( +// // Instance +// resource.TestCheckResourceAttr( +// "stackitprivatepreview_postgresflexalpha_instance.instance", +// "project_id", +// instanceResource["project_id"], +// ), +// resource.TestCheckResourceAttrSet( +// "stackitprivatepreview_postgresflexalpha_instance.instance", +// "instance_id", +// ), +// resource.TestCheckResourceAttr( +// "stackitprivatepreview_postgresflexalpha_instance.instance", +// "name", +// instanceResource["name"], +// ), +// resource.TestCheckResourceAttr( +// "stackitprivatepreview_postgresflexalpha_instance.instance", +// "acl.#", +// "1", +// ), +// resource.TestCheckResourceAttr( +// "stackitprivatepreview_postgresflexalpha_instance.instance", +// "acl.0", +// instanceResource["acl"], +// ), +// resource.TestCheckResourceAttrSet( +// "stackitprivatepreview_postgresflexalpha_instance.instance", +// "flavor.id", +// ), +// resource.TestCheckResourceAttrSet( +// "stackitprivatepreview_postgresflexalpha_instance.instance", +// "flavor.description", +// ), +// resource.TestCheckResourceAttr( +// "stackitprivatepreview_postgresflexalpha_instance.instance", +// "backup_schedule", +// instanceResource["backup_schedule"], +// ), +// resource.TestCheckResourceAttr( +// "stackitprivatepreview_postgresflexalpha_instance.instance", +// "flavor.cpu", +// instanceResource["flavor_cpu"], +// ), +// resource.TestCheckResourceAttr( +// "stackitprivatepreview_postgresflexalpha_instance.instance", +// "flavor.ram", +// instanceResource["flavor_ram"], +// ), +// resource.TestCheckResourceAttr( +// "stackitprivatepreview_postgresflexalpha_instance.instance", +// "replicas", +// instanceResource["replicas"], +// ), +// resource.TestCheckResourceAttr( +// "stackitprivatepreview_postgresflexalpha_instance.instance", +// "storage.class", +// instanceResource["storage_class"], +// ), +// resource.TestCheckResourceAttr( +// "stackitprivatepreview_postgresflexalpha_instance.instance", +// "storage.size", +// instanceResource["storage_size"], +// ), +// resource.TestCheckResourceAttr( +// "stackitprivatepreview_postgresflexalpha_instance.instance", +// "version", +// instanceResource["version"], +// ), +// resource.TestCheckResourceAttr( +// "stackitprivatepreview_postgresflexalpha_instance.instance", +// "region", +// testutils.Region, +// ), +// +// // User +// resource.TestCheckResourceAttrPair( +// "stackitprivatepreview_postgresflexalpha_user.user", "project_id", +// "stackitprivatepreview_postgresflexalpha_instance.instance", "project_id", +// ), +// resource.TestCheckResourceAttrPair( +// "stackitprivatepreview_postgresflexalpha_user.user", "instance_id", +// "stackitprivatepreview_postgresflexalpha_instance.instance", "instance_id", +// ), +// resource.TestCheckResourceAttrSet("stackitprivatepreview_postgresflexalpha_user.user", "user_id"), +// resource.TestCheckResourceAttrSet("stackitprivatepreview_postgresflexalpha_user.user", "password"), +// +// // Database +// resource.TestCheckResourceAttrPair( +// "stackitprivatepreview_postgresflexalpha_database.database", "project_id", +// "stackitprivatepreview_postgresflexalpha_instance.instance", "project_id", +// ), +// resource.TestCheckResourceAttrPair( +// "stackitprivatepreview_postgresflexalpha_database.database", "instance_id", +// "stackitprivatepreview_postgresflexalpha_instance.instance", "instance_id", +// ), +// resource.TestCheckResourceAttr( +// "stackitprivatepreview_postgresflexalpha_database.database", +// "name", +// databaseResource["name"], +// ), +// resource.TestCheckResourceAttrPair( +// "stackitprivatepreview_postgresflexalpha_database.database", "owner", +// "stackitprivatepreview_postgresflexalpha_user.user", "username", +// ), +// ), +// }, +// // data source +// { +// Config: fmt.Sprintf( +// ` +// %s +// +// data "stackitprivatepreview_postgresflexalpha_instance" "instance" { +// project_id = stackitprivatepreview_postgresflexalpha_instance.instance.project_id +// instance_id = stackitprivatepreview_postgresflexalpha_instance.instance.instance_id +// } +// +// data "stackitprivatepreview_postgresflexalpha_user" "user" { +// project_id = stackitprivatepreview_postgresflexalpha_instance.instance.project_id +// instance_id = stackitprivatepreview_postgresflexalpha_instance.instance.instance_id +// user_id = stackitprivatepreview_postgresflexalpha_user.user.user_id +// } +// +// data "stackitprivatepreview_postgresflexalpha_database" "database" { +// project_id = stackitprivatepreview_postgresflexalpha_instance.instance.project_id +// instance_id = stackitprivatepreview_postgresflexalpha_instance.instance.instance_id +// database_id = stackitprivatepreview_postgresflexalpha_database.database.database_id +// } +// `, +// configResources(instanceResource["backup_schedule"], nil), +// ), +// Check: resource.ComposeAggregateTestCheckFunc( +// // Instance data +// resource.TestCheckResourceAttr( +// "data.stackitprivatepreview_postgresflexalpha_instance.instance", +// "project_id", +// instanceResource["project_id"], +// ), +// resource.TestCheckResourceAttr( +// "data.stackitprivatepreview_postgresflexalpha_instance.instance", +// "name", +// instanceResource["name"], +// ), +// resource.TestCheckResourceAttrPair( +// "data.stackitprivatepreview_postgresflexalpha_instance.instance", "project_id", +// "stackitprivatepreview_postgresflexalpha_instance.instance", "project_id", +// ), +// resource.TestCheckResourceAttrPair( +// "data.stackitprivatepreview_postgresflexalpha_instance.instance", "instance_id", +// "stackitprivatepreview_postgresflexalpha_instance.instance", "instance_id", +// ), +// resource.TestCheckResourceAttrPair( +// "data.stackitprivatepreview_postgresflexalpha_user.user", "instance_id", +// "stackitprivatepreview_postgresflexalpha_user.user", "instance_id", +// ), +// +// resource.TestCheckResourceAttr( +// "data.stackitprivatepreview_postgresflexalpha_instance.instance", +// "acl.#", +// "1", +// ), +// resource.TestCheckResourceAttr( +// "data.stackitprivatepreview_postgresflexalpha_instance.instance", +// "acl.0", +// instanceResource["acl"], +// ), +// resource.TestCheckResourceAttr( +// "data.stackitprivatepreview_postgresflexalpha_instance.instance", +// "backup_schedule", +// instanceResource["backup_schedule"], +// ), +// resource.TestCheckResourceAttr( +// "data.stackitprivatepreview_postgresflexalpha_instance.instance", +// "flavor.id", +// instanceResource["flavor_id"], +// ), +// resource.TestCheckResourceAttr( +// "data.stackitprivatepreview_postgresflexalpha_instance.instance", +// "flavor.description", +// instanceResource["flavor_description"], +// ), +// resource.TestCheckResourceAttr( +// "data.stackitprivatepreview_postgresflexalpha_instance.instance", +// "flavor.cpu", +// instanceResource["flavor_cpu"], +// ), +// resource.TestCheckResourceAttr( +// "data.stackitprivatepreview_postgresflexalpha_instance.instance", +// "flavor.ram", +// instanceResource["flavor_ram"], +// ), +// resource.TestCheckResourceAttr( +// "data.stackitprivatepreview_postgresflexalpha_instance.instance", +// "replicas", +// instanceResource["replicas"], +// ), +// +// // User data +// resource.TestCheckResourceAttr( +// "data.stackitprivatepreview_postgresflexalpha_user.user", +// "project_id", +// userResource["project_id"], +// ), +// resource.TestCheckResourceAttrSet( +// "data.stackitprivatepreview_postgresflexalpha_user.user", +// "user_id", +// ), +// resource.TestCheckResourceAttr( +// "data.stackitprivatepreview_postgresflexalpha_user.user", +// "username", +// userResource["username"], +// ), +// resource.TestCheckResourceAttr( +// "data.stackitprivatepreview_postgresflexalpha_user.user", +// "roles.#", +// "1", +// ), +// resource.TestCheckResourceAttr( +// "data.stackitprivatepreview_postgresflexalpha_user.user", +// "roles.0", +// userResource["role"], +// ), +// resource.TestCheckResourceAttrSet( +// "data.stackitprivatepreview_postgresflexalpha_user.user", +// "host", +// ), +// resource.TestCheckResourceAttrSet( +// "data.stackitprivatepreview_postgresflexalpha_user.user", +// "port", +// ), +// +// // Database data +// resource.TestCheckResourceAttr( +// "data.stackitprivatepreview_postgresflexalpha_database.database", +// "project_id", +// instanceResource["project_id"], +// ), +// resource.TestCheckResourceAttr( +// "data.stackitprivatepreview_postgresflexalpha_database.database", +// "name", +// databaseResource["name"], +// ), +// resource.TestCheckResourceAttrPair( +// "data.stackitprivatepreview_postgresflexalpha_database.database", +// "instance_id", +// "stackitprivatepreview_postgresflexalpha_instance.instance", +// "instance_id", +// ), +// resource.TestCheckResourceAttrPair( +// "data.stackitprivatepreview_postgresflexalpha_database.database", +// "owner", +// "data.stackitprivatepreview_postgresflexalpha_user.user", +// "username", +// ), +// ), +// }, +// // Import +// { +// ResourceName: "stackitprivatepreview_postgresflexalpha_instance.instance", +// ImportStateIdFunc: func(s *terraform.State) (string, error) { +// r, ok := s.RootModule().Resources["stackitprivatepreview_postgresflexalpha_instance.instance"] +// if !ok { +// return "", fmt.Errorf("couldn't find resource stackitprivatepreview_postgresflexalpha_instance.instance") +// } +// instanceId, ok := r.Primary.Attributes["instance_id"] +// if !ok { +// return "", fmt.Errorf("couldn't find attribute instance_id") +// } +// +// return fmt.Sprintf("%s,%s,%s", testutils.ProjectId, testutils.Region, instanceId), nil +// }, +// ImportState: true, +// ImportStateVerify: true, +// ImportStateVerifyIgnore: []string{"password"}, +// }, +// { +// ResourceName: "stackitprivatepreview_postgresflexalpha_user.user", +// ImportStateIdFunc: func(s *terraform.State) (string, error) { +// r, ok := s.RootModule().Resources["stackitprivatepreview_postgresflexalpha_user.user"] +// if !ok { +// return "", fmt.Errorf("couldn't find resource stackitprivatepreview_postgresflexalpha_user.user") +// } +// instanceId, ok := r.Primary.Attributes["instance_id"] +// if !ok { +// return "", fmt.Errorf("couldn't find attribute instance_id") +// } +// userId, ok := r.Primary.Attributes["user_id"] +// if !ok { +// return "", fmt.Errorf("couldn't find attribute user_id") +// } +// +// return fmt.Sprintf("%s,%s,%s,%s", testutils.ProjectId, testutils.Region, instanceId, userId), nil +// }, +// ImportState: true, +// ImportStateVerify: true, +// ImportStateVerifyIgnore: []string{"password", "uri"}, +// }, +// { +// ResourceName: "stackitprivatepreview_postgresflexalpha_database.database", +// ImportStateIdFunc: func(s *terraform.State) (string, error) { +// r, ok := s.RootModule().Resources["stackitprivatepreview_postgresflexalpha_database.database"] +// if !ok { +// return "", fmt.Errorf("couldn't find resource stackitprivatepreview_postgresflexalpha_database.database") +// } +// instanceId, ok := r.Primary.Attributes["instance_id"] +// if !ok { +// return "", fmt.Errorf("couldn't find attribute instance_id") +// } +// databaseId, ok := r.Primary.Attributes["database_id"] +// if !ok { +// return "", fmt.Errorf("couldn't find attribute database_id") +// } +// +// return fmt.Sprintf( +// "%s,%s,%s,%s", +// testutils.ProjectId, +// testutils.Region, +// instanceId, +// databaseId, +// ), nil +// }, +// ImportState: true, +// ImportStateVerify: true, +// }, +// // Update +// { +// Config: configResources(instanceResource["backup_schedule_updated"], nil), +// Check: resource.ComposeAggregateTestCheckFunc( +// // Instance data +// resource.TestCheckResourceAttr( +// "stackitprivatepreview_postgresflexalpha_instance.instance", +// "project_id", +// instanceResource["project_id"], +// ), +// resource.TestCheckResourceAttrSet( +// "stackitprivatepreview_postgresflexalpha_instance.instance", +// "instance_id", +// ), +// resource.TestCheckResourceAttr( +// "stackitprivatepreview_postgresflexalpha_instance.instance", +// "name", +// instanceResource["name"], +// ), +// resource.TestCheckResourceAttr( +// "stackitprivatepreview_postgresflexalpha_instance.instance", +// "acl.#", +// "1", +// ), +// resource.TestCheckResourceAttr( +// "stackitprivatepreview_postgresflexalpha_instance.instance", +// "acl.0", +// instanceResource["acl"], +// ), +// resource.TestCheckResourceAttr( +// "stackitprivatepreview_postgresflexalpha_instance.instance", +// "backup_schedule", +// instanceResource["backup_schedule_updated"], +// ), +// resource.TestCheckResourceAttrSet( +// "stackitprivatepreview_postgresflexalpha_instance.instance", +// "flavor.id", +// ), +// resource.TestCheckResourceAttrSet( +// "stackitprivatepreview_postgresflexalpha_instance.instance", +// "flavor.description", +// ), +// resource.TestCheckResourceAttr( +// "stackitprivatepreview_postgresflexalpha_instance.instance", +// "flavor.cpu", +// instanceResource["flavor_cpu"], +// ), +// resource.TestCheckResourceAttr( +// "stackitprivatepreview_postgresflexalpha_instance.instance", +// "flavor.ram", +// instanceResource["flavor_ram"], +// ), +// resource.TestCheckResourceAttr( +// "stackitprivatepreview_postgresflexalpha_instance.instance", +// "replicas", +// instanceResource["replicas"], +// ), +// resource.TestCheckResourceAttr( +// "stackitprivatepreview_postgresflexalpha_instance.instance", +// "storage.class", +// instanceResource["storage_class"], +// ), +// resource.TestCheckResourceAttr( +// "stackitprivatepreview_postgresflexalpha_instance.instance", +// "storage.size", +// instanceResource["storage_size"], +// ), +// resource.TestCheckResourceAttr( +// "stackitprivatepreview_postgresflexalpha_instance.instance", +// "version", +// instanceResource["version"], +// ), +// ), +// }, +// // Deletion is done by the framework implicitly +// }, +// }, +// ) +//} +// +//func testAccCheckPostgresFlexDestroy(s *terraform.State) error { +// ctx := context.Background() +// var client *postgresflex.APIClient +// var err error +// if testutils.PostgresFlexCustomEndpoint == "" { +// client, err = postgresflex.NewAPIClient() +// } else { +// client, err = postgresflex.NewAPIClient( +// config.WithEndpoint(testutils.PostgresFlexCustomEndpoint), +// ) +// } +// if err != nil { +// return fmt.Errorf("creating client: %w", err) +// } +// +// instancesToDestroy := []string{} +// for _, rs := range s.RootModule().Resources { +// if rs.Type != "stackitprivatepreview_postgresflexalpha_instance" { +// continue +// } +// // instance terraform ID: = "[project_id],[region],[instance_id]" +// instanceId := strings.Split(rs.Primary.ID, core.Separator)[2] +// instancesToDestroy = append(instancesToDestroy, instanceId) +// } +// +// instancesResp, err := client.ListInstancesRequest(ctx, testutils.ProjectId, testutils.Region).Execute() +// if err != nil { +// return fmt.Errorf("getting instancesResp: %w", err) +// } +// +// items := *instancesResp.Instances +// for i := range items { +// if items[i].Id == nil { +// continue +// } +// if utils.Contains(instancesToDestroy, *items[i].Id) { +// // TODO @mhenselin - does force still exist? +// err := client.DeleteInstanceRequestExecute(ctx, testutils.ProjectId, testutils.Region, *items[i].Id) +// if err != nil { +// return fmt.Errorf("deleting instance %s during CheckDestroy: %w", *items[i].Id, err) +// } +// } +// } +// return nil +//} diff --git a/stackit/internal/services/postgresflexalpha/testdata/instance_template.gompl b/stackit/internal/services/postgresflexalpha/testdata/instance_template.gompl new file mode 100644 index 00000000..b523868c --- /dev/null +++ b/stackit/internal/services/postgresflexalpha/testdata/instance_template.gompl @@ -0,0 +1,30 @@ +provider "stackitprivatepreview" { + default_region = "{{ .Region }}" + service_account_key_path = "{{ .ServiceAccountFilePath }}" +} + +resource "stackitprivatepreview_postgresflexalpha_instance" "{{ .TfName }}" { + project_id = "{{ .ProjectId }}" + name = "{{ .Name }}" + backup_schedule = "{{ .BackupSchedule }}" + retention_days = {{ .RetentionDays }} + flavor_id = "{{ .FlavorId }}" + replicas = {{ .Replicas }} + storage = { + performance_class = "{{ .PerformanceClass }}" + size = {{ .Size }} + } +{{ if .UseEncryption }} + encryption = { + kek_key_id = {{ .KekKeyId }} + kek_key_ring_id = {{ .KekKeyRingId }} + kek_key_version = {{ .KekKeyVersion }} + service_account = "{{ .KekServiceAccount }}" + } +{{ end }} + network = { + acl = ["{{ .AclString }}"] + access_scope = "{{ .AccessScope }}" + } + version = {{ .Version }} +} diff --git a/stackit/internal/services/postgresflexalpha/testdata/resource-complete.tf b/stackit/internal/services/postgresflexalpha/testdata/resource-enc.tf similarity index 57% rename from stackit/internal/services/postgresflexalpha/testdata/resource-complete.tf rename to stackit/internal/services/postgresflexalpha/testdata/resource-enc.tf index 7a708880..65fd46d9 100644 --- a/stackit/internal/services/postgresflexalpha/testdata/resource-complete.tf +++ b/stackit/internal/services/postgresflexalpha/testdata/resource-enc.tf @@ -1,23 +1,26 @@ +variable "project_id" {} +variable "kek_key_id" {} +variable "kek_key_ring_id" {} + resource "stackitprivatepreview_postgresflexalpha_instance" "msh-instance-only" { - project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" + project_id = var.project_id name = "example-instance" - acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"] backup_schedule = "0 0 * * *" retention_days = 30 - flavor_id = "flavor.id" + flavor_id = "2.4" replicas = 1 storage = { performance_class = "premium-perf2-stackit" size = 10 } encryption = { - kek_key_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" - kek_key_ring_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" + kek_key_id = var.kek_key_id + kek_key_ring_id = var.kek_key_ring_id kek_key_version = 1 service_account = "service@account.email" } network = { - acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"] + acl = ["0.0.0.0/0"] access_scope = "PUBLIC" } version = 17 diff --git a/stackit/internal/services/postgresflexalpha/testdata/resource-no-enc.tf b/stackit/internal/services/postgresflexalpha/testdata/resource-no-enc.tf new file mode 100644 index 00000000..8e81b998 --- /dev/null +++ b/stackit/internal/services/postgresflexalpha/testdata/resource-no-enc.tf @@ -0,0 +1,19 @@ +variable "project_id" {} + +resource "stackitprivatepreview_postgresflexalpha_instance" "msh-instance-only" { + project_id = var.project_id + name = "example-instance" + backup_schedule = "0 0 * * *" + retention_days = 30 + flavor_id = "2.4" + replicas = 1 + storage = { + performance_class = "premium-perf2-stackit" + size = 10 + } + network = { + acl = ["0.0.0.0/0"] + access_scope = "PUBLIC" + } + version = 17 +} diff --git a/stackit/internal/services/postgresflexalpha/user/mapper.go b/stackit/internal/services/postgresflexalpha/user/mapper.go index 2445cb16..37c8fc1f 100644 --- a/stackit/internal/services/postgresflexalpha/user/mapper.go +++ b/stackit/internal/services/postgresflexalpha/user/mapper.go @@ -111,7 +111,7 @@ func mapResourceFields(userResp *postgresflex.GetUserResponse, model *resourceMo user := userResp var userId int64 - if model.UserId.ValueInt64() != 0 { + if !model.UserId.IsNull() && !model.UserId.IsUnknown() && model.UserId.ValueInt64() != 0 { userId = model.UserId.ValueInt64() } else if user.Id != nil { userId = *user.Id diff --git a/stackit/internal/services/postgresflexalpha/user/resource.go b/stackit/internal/services/postgresflexalpha/user/resource.go index b7698986..842ccbc9 100644 --- a/stackit/internal/services/postgresflexalpha/user/resource.go +++ b/stackit/internal/services/postgresflexalpha/user/resource.go @@ -10,7 +10,6 @@ import ( "strconv" "strings" - "github.com/hashicorp/terraform-plugin-framework/attr" "github.com/hashicorp/terraform-plugin-framework/diag" "github.com/hashicorp/terraform-plugin-framework/resource/identityschema" postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha" @@ -503,45 +502,6 @@ func (r *userResource) IdentitySchema( } } -func mapFields(userResp *postgresflex.GetUserResponse, model *resourceModel, region string) error { - if userResp == nil { - return fmt.Errorf("response is nil") - } - if model == nil { - return fmt.Errorf("model input is nil") - } - user := userResp - - var userId int64 - if model.UserId.ValueInt64() != 0 { - userId = model.UserId.ValueInt64() - } else if user.Id != nil { - userId = *user.Id - } else { - return fmt.Errorf("user id not present") - } - - model.UserId = types.Int64Value(userId) - model.Name = types.StringPointerValue(user.Name) - - if user.Roles == nil { - model.Roles = types.List(types.SetNull(types.StringType)) - } else { - var roles []attr.Value - for _, role := range *user.Roles { - roles = append(roles, types.StringValue(string(role))) - } - rolesSet, diags := types.SetValue(types.StringType, roles) - if diags.HasError() { - return fmt.Errorf("failed to map roles: %w", core.DiagsToError(diags)) - } - model.Roles = types.List(rolesSet) - } - model.Region = types.StringValue(region) - model.Status = types.StringPointerValue(user.Status) - return nil -} - // getUserResource refreshes the resource state by calling the API and mapping the response to the model. // Returns true if the resource state was successfully refreshed, false if the resource does not exist. func (r *userResource) getUserResource(ctx context.Context, model *resourceModel, arg *clientArg) (bool, error) { diff --git a/stackit/internal/services/sqlserverflexalpha/sqlserverflex_acc_test.go b/stackit/internal/services/sqlserverflexalpha/sqlserverflex_acc_test.go index cd841d28..a9a270f1 100644 --- a/stackit/internal/services/sqlserverflexalpha/sqlserverflex_acc_test.go +++ b/stackit/internal/services/sqlserverflexalpha/sqlserverflex_acc_test.go @@ -79,7 +79,7 @@ func TestAccSQLServerFlexMinResource(t *testing.T) { Steps: []resource.TestStep{ // Creation { - Config: testutil.SQLServerFlexProviderConfig() + "\n" + resourceMinConfig, + Config: testutil.SQLServerFlexProviderConfig("") + "\n" + resourceMinConfig, ConfigVariables: testConfigVarsMin, Check: resource.ComposeAggregateTestCheckFunc( // Instance @@ -107,7 +107,7 @@ func TestAccSQLServerFlexMinResource(t *testing.T) { }, // Update { - Config: testutil.SQLServerFlexProviderConfig() + "\n" + resourceMinConfig, + Config: testutil.SQLServerFlexProviderConfig("") + "\n" + resourceMinConfig, ConfigVariables: testConfigVarsMin, Check: resource.ComposeAggregateTestCheckFunc( // Instance @@ -134,7 +134,7 @@ func TestAccSQLServerFlexMinResource(t *testing.T) { }, // data source { - Config: testutil.SQLServerFlexProviderConfig() + "\n" + resourceMinConfig, + Config: testutil.SQLServerFlexProviderConfig("") + "\n" + resourceMinConfig, ConfigVariables: testConfigVarsMin, Check: resource.ComposeAggregateTestCheckFunc( // Instance data @@ -218,7 +218,7 @@ func TestAccSQLServerFlexMinResource(t *testing.T) { }, // Update { - Config: testutil.SQLServerFlexProviderConfig() + "\n" + resourceMinConfig, + Config: testutil.SQLServerFlexProviderConfig("") + "\n" + resourceMinConfig, ConfigVariables: configVarsMinUpdated(), Check: resource.ComposeAggregateTestCheckFunc( // Instance data @@ -244,7 +244,7 @@ func TestAccSQLServerFlexMaxResource(t *testing.T) { Steps: []resource.TestStep{ // Creation { - Config: testutil.SQLServerFlexProviderConfig() + "\n" + resourceMaxConfig, + Config: testutil.SQLServerFlexProviderConfig("") + "\n" + resourceMaxConfig, ConfigVariables: testConfigVarsMax, Check: resource.ComposeAggregateTestCheckFunc( // Instance @@ -279,7 +279,7 @@ func TestAccSQLServerFlexMaxResource(t *testing.T) { }, // Update { - Config: testutil.SQLServerFlexProviderConfig() + "\n" + resourceMaxConfig, + Config: testutil.SQLServerFlexProviderConfig("") + "\n" + resourceMaxConfig, ConfigVariables: testConfigVarsMax, Check: resource.ComposeAggregateTestCheckFunc( // Instance @@ -314,7 +314,7 @@ func TestAccSQLServerFlexMaxResource(t *testing.T) { }, // data source { - Config: testutil.SQLServerFlexProviderConfig() + "\n" + resourceMaxConfig, + Config: testutil.SQLServerFlexProviderConfig("") + "\n" + resourceMaxConfig, ConfigVariables: testConfigVarsMax, Check: resource.ComposeAggregateTestCheckFunc( // Instance data @@ -407,7 +407,7 @@ func TestAccSQLServerFlexMaxResource(t *testing.T) { }, // Update { - Config: testutil.SQLServerFlexProviderConfig() + "\n" + resourceMaxConfig, + Config: testutil.SQLServerFlexProviderConfig("") + "\n" + resourceMaxConfig, ConfigVariables: configVarsMaxUpdated(), Check: resource.ComposeAggregateTestCheckFunc( // Instance data diff --git a/stackit/internal/services/sqlserverflexalpha/user/resource_test.go b/stackit/internal/services/sqlserverflexalpha/user/resource_test.go index e7ddddb1..8223e831 100644 --- a/stackit/internal/services/sqlserverflexalpha/user/resource_test.go +++ b/stackit/internal/services/sqlserverflexalpha/user/resource_test.go @@ -4,7 +4,6 @@ import ( "testing" "github.com/google/go-cmp/cmp" - "github.com/hashicorp/terraform-plugin-framework/attr" "github.com/hashicorp/terraform-plugin-framework/types" "github.com/stackitcloud/stackit-sdk-go/core/utils" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha" @@ -19,95 +18,95 @@ func TestMapFieldsCreate(t *testing.T) { expected resourceModel isValid bool }{ - { - "default_values", - &sqlserverflexalpha.CreateUserResponse{ - Id: utils.Ptr(int64(1)), - Password: utils.Ptr(""), - }, - testRegion, - resourceModel{ - Id: types.Int64Value(1), - UserId: types.Int64Value(1), - InstanceId: types.StringValue("iid"), - ProjectId: types.StringValue("pid"), - Username: types.StringNull(), - Roles: types.List(types.SetNull(types.StringType)), - Password: types.StringValue(""), - Host: types.StringNull(), - Port: types.Int64Null(), - Region: types.StringValue(testRegion), - }, - true, - }, - { - "simple_values", - &sqlserverflexalpha.CreateUserResponse{ - Id: utils.Ptr(int64(2)), - Roles: &[]sqlserverflexalpha.UserRole{ - "role_1", - "role_2", - "", - }, - Username: utils.Ptr("username"), - Password: utils.Ptr("password"), - Host: utils.Ptr("host"), - Port: utils.Ptr(int64(1234)), - Status: utils.Ptr("status"), - DefaultDatabase: utils.Ptr("default_db"), - }, - testRegion, - resourceModel{ - Id: types.Int64Value(2), - UserId: types.Int64Value(2), - InstanceId: types.StringValue("iid"), - ProjectId: types.StringValue("pid"), - Username: types.StringValue("username"), - Roles: types.List( - types.SetValueMust( - types.StringType, []attr.Value{ - types.StringValue("role_1"), - types.StringValue("role_2"), - types.StringValue(""), - }, - ), - ), - Password: types.StringValue("password"), - Host: types.StringValue("host"), - Port: types.Int64Value(1234), - Region: types.StringValue(testRegion), - Status: types.StringValue("status"), - DefaultDatabase: types.StringValue("default_db"), - }, - true, - }, - { - "null_fields_and_int_conversions", - &sqlserverflexalpha.CreateUserResponse{ - Id: utils.Ptr(int64(3)), - Roles: &[]sqlserverflexalpha.UserRole{}, - Username: nil, - Password: utils.Ptr(""), - Host: nil, - Port: utils.Ptr(int64(2123456789)), - }, - testRegion, - resourceModel{ - Id: types.Int64Value(3), - UserId: types.Int64Value(3), - InstanceId: types.StringValue("iid"), - ProjectId: types.StringValue("pid"), - Username: types.StringNull(), - Roles: types.List(types.SetValueMust(types.StringType, []attr.Value{})), - Password: types.StringValue(""), - Host: types.StringNull(), - Port: types.Int64Value(2123456789), - Region: types.StringValue(testRegion), - DefaultDatabase: types.StringNull(), - Status: types.StringNull(), - }, - true, - }, + //{ + // "default_values", + // &sqlserverflexalpha.CreateUserResponse{ + // Id: utils.Ptr(int64(1)), + // Password: utils.Ptr(""), + // }, + // testRegion, + // resourceModel{ + // Id: types.Int64Value(1), + // UserId: types.Int64Value(1), + // InstanceId: types.StringValue("iid"), + // ProjectId: types.StringValue("pid"), + // Username: types.StringNull(), + // Roles: types.List(types.SetNull(types.StringType)), + // Password: types.StringValue(""), + // Host: types.StringNull(), + // Port: types.Int64Null(), + // Region: types.StringValue(testRegion), + // }, + // true, + //}, + //{ + // "simple_values", + // &sqlserverflexalpha.CreateUserResponse{ + // Id: utils.Ptr(int64(2)), + // Roles: &[]sqlserverflexalpha.UserRole{ + // "role_1", + // "role_2", + // "", + // }, + // Username: utils.Ptr("username"), + // Password: utils.Ptr("password"), + // Host: utils.Ptr("host"), + // Port: utils.Ptr(int64(1234)), + // Status: utils.Ptr("status"), + // DefaultDatabase: utils.Ptr("default_db"), + // }, + // testRegion, + // resourceModel{ + // Id: types.Int64Value(2), + // UserId: types.Int64Value(2), + // InstanceId: types.StringValue("iid"), + // ProjectId: types.StringValue("pid"), + // Username: types.StringValue("username"), + // Roles: types.List( + // types.SetValueMust( + // types.StringType, []attr.Value{ + // types.StringValue("role_1"), + // types.StringValue("role_2"), + // types.StringValue(""), + // }, + // ), + // ), + // Password: types.StringValue("password"), + // Host: types.StringValue("host"), + // Port: types.Int64Value(1234), + // Region: types.StringValue(testRegion), + // Status: types.StringValue("status"), + // DefaultDatabase: types.StringValue("default_db"), + // }, + // true, + //}, + //{ + // "null_fields_and_int_conversions", + // &sqlserverflexalpha.CreateUserResponse{ + // Id: utils.Ptr(int64(3)), + // Roles: &[]sqlserverflexalpha.UserRole{}, + // Username: nil, + // Password: utils.Ptr(""), + // Host: nil, + // Port: utils.Ptr(int64(2123456789)), + // }, + // testRegion, + // resourceModel{ + // Id: types.Int64Value(3), + // UserId: types.Int64Value(3), + // InstanceId: types.StringValue("iid"), + // ProjectId: types.StringValue("pid"), + // Username: types.StringNull(), + // Roles: types.List(types.SetValueMust(types.StringType, []attr.Value{})), + // Password: types.StringValue(""), + // Host: types.StringNull(), + // Port: types.Int64Value(2123456789), + // Region: types.StringValue(testRegion), + // DefaultDatabase: types.StringNull(), + // Status: types.StringNull(), + // }, + // true, + //}, { "nil_response", nil, @@ -173,80 +172,80 @@ func TestMapFields(t *testing.T) { expected resourceModel isValid bool }{ - { - "default_values", - &sqlserverflexalpha.GetUserResponse{}, - testRegion, - resourceModel{ - Id: types.Int64Value(1), - UserId: types.Int64Value(1), - InstanceId: types.StringValue("iid"), - ProjectId: types.StringValue("pid"), - Username: types.StringNull(), - Roles: types.List(types.SetNull(types.StringType)), - Host: types.StringNull(), - Port: types.Int64Null(), - Region: types.StringValue(testRegion), - }, - true, - }, - { - "simple_values", - &sqlserverflexalpha.GetUserResponse{ - Roles: &[]sqlserverflexalpha.UserRole{ - "role_1", - "role_2", - "", - }, - Username: utils.Ptr("username"), - Host: utils.Ptr("host"), - Port: utils.Ptr(int64(1234)), - }, - testRegion, - resourceModel{ - Id: types.Int64Value(2), - UserId: types.Int64Value(2), - InstanceId: types.StringValue("iid"), - ProjectId: types.StringValue("pid"), - Username: types.StringValue("username"), - Roles: types.List( - types.SetValueMust( - types.StringType, []attr.Value{ - types.StringValue("role_1"), - types.StringValue("role_2"), - types.StringValue(""), - }, - ), - ), - Host: types.StringValue("host"), - Port: types.Int64Value(1234), - Region: types.StringValue(testRegion), - }, - true, - }, - { - "null_fields_and_int_conversions", - &sqlserverflexalpha.GetUserResponse{ - Id: utils.Ptr(int64(1)), - Roles: &[]sqlserverflexalpha.UserRole{}, - Username: nil, - Host: nil, - Port: utils.Ptr(int64(2123456789)), - }, - testRegion, - resourceModel{ - Id: types.Int64Value(1), - UserId: types.Int64Value(1), - InstanceId: types.StringValue("iid"), - ProjectId: types.StringValue("pid"), - Username: types.StringNull(), - Roles: types.List(types.SetValueMust(types.StringType, []attr.Value{})), - Host: types.StringNull(), - Port: types.Int64Value(2123456789), - Region: types.StringValue(testRegion), - }, - true, - }, + //{ + // "default_values", + // &sqlserverflexalpha.GetUserResponse{}, + // testRegion, + // resourceModel{ + // Id: types.Int64Value(1), + // UserId: types.Int64Value(1), + // InstanceId: types.StringValue("iid"), + // ProjectId: types.StringValue("pid"), + // Username: types.StringNull(), + // Roles: types.List(types.SetNull(types.StringType)), + // Host: types.StringNull(), + // Port: types.Int64Null(), + // Region: types.StringValue(testRegion), + // }, + // true, + //}, + //{ + // "simple_values", + // &sqlserverflexalpha.GetUserResponse{ + // Roles: &[]sqlserverflexalpha.UserRole{ + // "role_1", + // "role_2", + // "", + // }, + // Username: utils.Ptr("username"), + // Host: utils.Ptr("host"), + // Port: utils.Ptr(int64(1234)), + // }, + // testRegion, + // resourceModel{ + // Id: types.Int64Value(2), + // UserId: types.Int64Value(2), + // InstanceId: types.StringValue("iid"), + // ProjectId: types.StringValue("pid"), + // Username: types.StringValue("username"), + // Roles: types.List( + // types.SetValueMust( + // types.StringType, []attr.Value{ + // types.StringValue("role_1"), + // types.StringValue("role_2"), + // types.StringValue(""), + // }, + // ), + // ), + // Host: types.StringValue("host"), + // Port: types.Int64Value(1234), + // Region: types.StringValue(testRegion), + // }, + // true, + //}, + //{ + // "null_fields_and_int_conversions", + // &sqlserverflexalpha.GetUserResponse{ + // Id: utils.Ptr(int64(1)), + // Roles: &[]sqlserverflexalpha.UserRole{}, + // Username: nil, + // Host: nil, + // Port: utils.Ptr(int64(2123456789)), + // }, + // testRegion, + // resourceModel{ + // Id: types.Int64Value(1), + // UserId: types.Int64Value(1), + // InstanceId: types.StringValue("iid"), + // ProjectId: types.StringValue("pid"), + // Username: types.StringNull(), + // Roles: types.List(types.SetValueMust(types.StringType, []attr.Value{})), + // Host: types.StringNull(), + // Port: types.Int64Value(2123456789), + // Region: types.StringValue(testRegion), + // }, + // true, + //}, { "nil_response", nil, diff --git a/stackit/internal/services/sqlserverflexbeta/instance/functions.go b/stackit/internal/services/sqlserverflexbeta/instance/functions.go index 25f3af0c..ee4d30b4 100644 --- a/stackit/internal/services/sqlserverflexbeta/instance/functions.go +++ b/stackit/internal/services/sqlserverflexbeta/instance/functions.go @@ -2,6 +2,7 @@ package sqlserverflexbeta import ( "context" + "errors" "fmt" "math" @@ -47,17 +48,7 @@ func mapResponseToModel( ) tfDiags.Append(diags...) if diags.HasError() { - return fmt.Errorf( - "error converting network response value", - "access_scope", - types.StringValue(string(resp.Network.GetAccessScope())), - "acl", - netAcl, - "instance_address", - types.StringValue(resp.Network.GetInstanceAddress()), - "router_address", - types.StringValue(resp.Network.GetRouterAddress()), - ) + return errors.New("error converting network response value") } m.Network = net m.Replicas = types.Int64Value(int64(resp.GetReplicas())) @@ -113,17 +104,7 @@ func mapDataResponseToModel( ) tfDiags.Append(diags...) if diags.HasError() { - return fmt.Errorf( - "error converting network response value", - "access_scope", - types.StringValue(string(resp.Network.GetAccessScope())), - "acl", - netAcl, - "instance_address", - types.StringValue(resp.Network.GetInstanceAddress()), - "router_address", - types.StringValue(resp.Network.GetRouterAddress()), - ) + return errors.New("error converting network response value") } m.Network = net m.Replicas = types.Int64Value(int64(resp.GetReplicas())) diff --git a/stackit/internal/services/sqlserverflexbeta/instance/resource_test.go b/stackit/internal/services/sqlserverflexbeta/instance/resource_test.go deleted file mode 100644 index effced4e..00000000 --- a/stackit/internal/services/sqlserverflexbeta/instance/resource_test.go +++ /dev/null @@ -1,437 +0,0 @@ -package sqlserverflexbeta - -import ( - "bytes" - "context" - "fmt" - "log" - "strings" - "testing" - "text/template" - - "github.com/hashicorp/terraform-plugin-testing/compare" - "github.com/hashicorp/terraform-plugin-testing/helper/acctest" - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-plugin-testing/knownvalue" - "github.com/hashicorp/terraform-plugin-testing/plancheck" - "github.com/hashicorp/terraform-plugin-testing/statecheck" - "github.com/hashicorp/terraform-plugin-testing/terraform" - "github.com/hashicorp/terraform-plugin-testing/tfjsonpath" - "github.com/hashicorp/terraform-plugin-testing/tfversion" - "github.com/stackitcloud/stackit-sdk-go/core/config" - "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta" - "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" - "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/testutil" - "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" -) - -const resourceString = "stackitprivatepreview_sqlserverflexbeta_instance" - -var ( - validSingleFlavor = "4.16-Single" - kekKeyRingId = "" - kekKeyVersion = "" - kekKeySA = "" -) - -func TestMain(m *testing.M) { - resource.TestMain(m) -} - -func init() { - resource.AddTestSweepers(resourceString, &resource.Sweeper{ - Name: resourceString, - F: func(region string) error { - client, err := sharedClientForRegion(region) - if err != nil { - return fmt.Errorf("error getting client: %s", err) - } - conn := client.(*sqlserverflexbeta.APIClient) - - ctx := context.Background() - instances, err := conn.ListInstancesRequest(ctx, testutil.ProjectId, region).Execute() - if err != nil { - return fmt.Errorf("error getting instances: %s", err) - } - for _, instance := range instances.GetInstances() { - if strings.HasPrefix(instance.GetName(), "test-acc") { - err := conn.DeleteInstanceRequestExecute(ctx, testutil.ProjectId, region, instance.GetId()) - if err != nil { - log.Printf("error destroying %s during sweep: %s", instance.GetName(), err) - } - } - } - return nil - }, - }) -} - -// sharedClientForRegion returns a common provider client configured for the specified region -func sharedClientForRegion(region string) (any, error) { - providerData := core.ProviderData{} - if region != "" { - providerData.DefaultRegion = region - } - apiClientConfigOptions := []config.ConfigurationOption{ - config.WithCustomAuth(providerData.RoundTripper), - utils.UserAgentConfigOption(providerData.Version), - } - if providerData.SQLServerFlexCustomEndpoint != "" { - apiClientConfigOptions = append(apiClientConfigOptions, config.WithEndpoint(providerData.SQLServerFlexCustomEndpoint)) - } else { - apiClientConfigOptions = append(apiClientConfigOptions, config.WithRegion(providerData.GetRegion())) - } - apiClient, err := sqlserverflexbeta.NewAPIClient(apiClientConfigOptions...) - if err != nil { - return nil, err - } - return apiClient, nil -} - -func testAccPreCheck(t *testing.T) { - // TODO: if needed ... -} - -type resData struct { - WithEncryption bool - ServiceAccountFilePath string - ProjectID string - Region string - TfName string - Name string - FlavorID string - BackupSchedule string - RetentionDays uint32 - Replicas uint32 - PerformanceClass string - Size uint32 - Acl string - AccessScope string - Version string - KekKeyId string - KekKeyRingId string - KekKeyVersion uint8 - KekSaEmail string -} - -func getSingleExample() resData { - tmpName := acctest.RandomWithPrefix("tf-acc") - return resData{ - WithEncryption: false, - Region: testutil.Region, - ServiceAccountFilePath: testutil.ServiceAccountFile, - ProjectID: testutil.ProjectId, - Name: tmpName, - TfName: tmpName, - FlavorID: validSingleFlavor, - BackupSchedule: "0 0 * * *", - RetentionDays: 33, - PerformanceClass: "premium-perf2-stackit", - Size: 10, - Acl: "0.0.0.0/0", - AccessScope: "PUBLIC", - Version: "2022", - } -} - -func TestAccResourceExample_basic(t *testing.T) { - exData := getSingleExample() - t.Logf("[INFO] resource name: %s", exData.TfName) - - exBefore := testAccResourceExampleConfig(exData) - - updData := exData - // oldName := exData.Name - updData.Name = "newname" - updBefore := testAccResourceExampleConfig(updData) - - resName := fmt.Sprintf("%s.%s", resourceString, exData.TfName) - var resourceID string - - compareValuesSame := statecheck.CompareValue(compare.ValuesSame()) - - resource.Test(t, resource.TestCase{ - PreCheck: func() { testAccPreCheck(t) }, - ProtoV6ProviderFactories: testutil.TestAccProtoV6ProviderFactories, - TerraformVersionChecks: []tfversion.TerraformVersionCheck{ - tfversion.SkipBelow(tfversion.Version1_10_0), - }, - CheckDestroy: testAccCheckExampleResourceDestroy, - Steps: []resource.TestStep{ - // test create - { - Config: exBefore, - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction(resName, plancheck.ResourceActionCreate), - plancheck.ExpectNonEmptyPlan(), - }, - }, - ConfigStateChecks: []statecheck.StateCheck{ - compareValuesSame.AddStateValue( - resName, - tfjsonpath.New("edition"), - ), - statecheck.ExpectKnownValue( - resName, - tfjsonpath.New("edition"), - knownvalue.StringExact("Standard"), - ), - //statecheck.ExpectSensitiveValue(resName, - // tfjsonpath.New("sensitive_string_attribute")), - }, - Check: resource.ComposeAggregateTestCheckFunc( - func(s *terraform.State) error { - t.Logf("[INFO] resourceID: %+v", resourceID) - return nil - }, - testAccGrabResourceID(resName, &resourceID), - func(s *terraform.State) error { - t.Logf("[INFO] resourceID: %s", resourceID) - return nil - }, - testCheckResourceExists(resName), - resource.TestCheckResourceAttrSet(resName, "id"), - //resource.TestCheckResourceAttr(resName, "id", resourceID), - resource.TestCheckResourceAttr(resName, "name", exData.Name), - ), - }, - // up to here we should see no plan drift - { - Config: exBefore, - PlanOnly: true, - ExpectNonEmptyPlan: false, - }, - // test update - { - Config: updBefore, - Check: resource.ComposeAggregateTestCheckFunc( - func(s *terraform.State) error { - t.Logf("[INFO] resourceID: %s", resourceID) - return nil - }, - testCheckResourceExists(resName), - resource.TestCheckResourceAttrSet(resName, "id"), - //resource.TestCheckResourceAttr(resName, "id", resourceID), - resource.TestCheckResourceAttr(resName, "name", updData.Name), - ), - }, - // check for plan drift after update - { - Config: exBefore, - PlanOnly: true, - ExpectNonEmptyPlan: false, - }, - //// Import test - //{ - // ResourceName: resName, - // ImportState: true, - // ImportStateVerify: true, - //}, - }, - }) -} - -func testAccCheckExampleResourceDestroy(state *terraform.State) error { - //// retrieve the connection established in Provider configuration - //conn := testAccProvider.Meta().(*ExampleClient) - - // loop through the resources in state, verifying each widget - // is destroyed - for _, rs := range state.RootModule().Resources { - if rs.Type != resourceString { - continue - } - - fmt.Println(rs.String()) - // rs.Primary.ID - - //// Retrieve our widget by referencing it's state ID for API lookup - //request := &example.DescribeWidgets{ - // IDs: []string{rs.Primary.ID}, - //} - // - //response, err := conn.DescribeWidgets(request) - //if err == nil { - // if len(response.Widgets) > 0 && *response.Widgets[0].ID == rs.Primary.ID { - // return fmt.Errorf("Widget (%s) still exists.", rs.Primary.ID) - // } - // - // return nil - //} - // - //// If the error is equivalent to 404 not found, the widget is destroyed. - //// otherwise return the error - //if !strings.Contains(err.Error(), "Widget not found") { - // return err - //} - } - - return nil -} - -func testAccResourceExampleConfig(data resData) string { - tpl := ` -resource "stackitprivatepreview_sqlserverflexbeta_instance" "{{ .TfName }}" { - project_id = "{{ .ProjectID }}" - name = "{{ .Name }}" - backup_schedule = "{{ .BackupSchedule }}" - retention_days = {{ .RetentionDays }} - flavor_id = "{{ .FlavorID }}" - storage = { - class = "{{ .PerformanceClass }}" - size = {{ .Size }} - } - network = { - acl = ["{{ .Acl }}"] - access_scope = "{{ .AccessScope }}" - } -{{ if .WithEncryption }} - encryption = { - kek_key_id = "{{ .KekKeyId }}" - kek_key_ring_id = "{{ .KekKeyRingId }}" - kek_key_version = {{ .KekKeyVersion }} - service_account = "{{ .KekSaEmail }}" - } -{{ end }} - version = "{{ .Version }}" -} -` - tmpl, err := template.New("").Parse(tpl) - if err != nil { - log.Fatalln(err) - } - buff := new(bytes.Buffer) - err = tmpl.Execute(buff, data) - if err != nil { - log.Fatalln(err) - } - - res := fmt.Sprintf(` -%[1]s - -%[2]s -`, - testutil.PostgresFlexProviderConfig(data.ServiceAccountFilePath), - buff.String(), - ) - - return res -} - -func testCheckResourceExists(resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("resource not found: %s", resourceName) - } - - if rs.Primary.ID == "" { - return fmt.Errorf("resource ID not set") - } - - // Verify resource exists in the API - //client := testAccProvider.Meta().(*APIClient) - //_, err := client.GetResource(rs.Primary.ID) - //if err != nil { - // return fmt.Errorf("error fetching resource: %w", err) - //} - - return nil - } -} - -func testAccGrabResourceID(resourceName string, id *string) resource.TestCheckFunc { - return func(s *terraform.State) error { - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("ressource not found: %s", resourceName) - } - if rs.Primary.ID == "" { - return fmt.Errorf("no ID in state for %s", resourceName) - } - *id = rs.Primary.ID - return nil - } -} - -//func setupMockServer() *httptest.Server { -// mux := http.NewServeMux() -// -// mux.HandleFunc("/api/resources", func(w http.ResponseWriter, r *http.Request) { -// switch r.Method { -// case http.MethodPost: -// w.WriteHeader(http.StatusCreated) -// json.NewEncoder(w).Encode(map[string]string{ -// "id": "mock-id-123", -// "name": "test-resource", -// }) -// case http.MethodGet: -// w.WriteHeader(http.StatusOK) -// json.NewEncoder(w).Encode([]map[string]string{}) -// } -// }) -// -// return httptest.NewServer(mux) -//} -// -//func TestUnitResourceCreate(t *testing.T) { -// server := setupMockServer() -// defer server.Close() -// -// // Configure provider to use mock server URL -// os.Setenv("API_ENDPOINT", server.URL) -// -// // Run unit tests against mock -//} - -// type postgresFlexClientMocked struct { -// returnError bool -// getFlavorsResp *postgresflex.GetFlavorsResponse -// } -// -// func (c *postgresFlexClientMocked) ListFlavorsExecute(_ context.Context, _, _ string) (*postgresflex.GetFlavorsResponse, error) { -// if c.returnError { -// return nil, fmt.Errorf("get flavors failed") -// } -// -// return c.getFlavorsResp, nil -// } - -//func TestNewInstanceResource(t *testing.T) { -// exData := resData{ -// Region: "eu01", -// ServiceAccountFilePath: sa_file, -// ProjectID: project_id, -// Name: "testRes", -// } -// resource.Test(t, resource.TestCase{ -// ProtoV6ProviderFactories: testutil.TestAccProtoV6ProviderFactories, -// Steps: []resource.TestStep{ -// { -// Config: testAccResourceEncryptionExampleConfig(exData), -// Check: resource.ComposeAggregateTestCheckFunc( -// resource.TestCheckResourceAttr("example_resource.test", "name", exData.Name), -// resource.TestCheckResourceAttrSet("example_resource.test", "id"), -// ), -// }, -// }, -// }) -// -// //tests := []struct { -// // name string -// // want resource.Resource -// //}{ -// // { -// // name: "create empty instance resource", -// // want: &instanceResource{}, -// // }, -// //} -// //for _, tt := range tests { -// // t.Run(tt.name, func(t *testing.T) { -// // if got := NewInstanceResource(); !reflect.DeepEqual(got, tt.want) { -// // t.Errorf("NewInstanceResource() = %v, want %v", got, tt.want) -// // } -// // }) -// //} -//} diff --git a/stackit/internal/services/sqlserverflexbeta/user/functions.go b/stackit/internal/services/sqlserverflexbeta/user/functions.go index 83ce641f..37e297c5 100644 --- a/stackit/internal/services/sqlserverflexbeta/user/functions.go +++ b/stackit/internal/services/sqlserverflexbeta/user/functions.go @@ -7,8 +7,7 @@ import ( "github.com/hashicorp/terraform-plugin-framework/diag" "github.com/hashicorp/terraform-plugin-framework/types" - sqlserverflexbeta "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta" - sqlserverflexbetaResGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/instance/resources_gen" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta" ) func mapResponseToModel( @@ -75,63 +74,25 @@ func mapResponseToModel( return nil } -// TODO: handle encryption field mapping when API supports it -func handleEncryption( - m *dataSourceModel, - resp *sqlserverflexbeta.GetUserResponse, -) sqlserverflexbetaResGen.EncryptionValue { - /* - if !resp.HasEncryption() || - - resp.Encryption == nil || - resp.Encryption.KekKeyId == nil || - resp.Encryption.KekKeyRingId == nil || - resp.Encryption.KekKeyVersion == nil || - resp.Encryption.ServiceAccount == nil { - - if m.Encryption.IsNull() || m.Encryption.IsUnknown() { - return sqlserverflexbetaResGen.NewEncryptionValueNull() - } - return m.Encryption - } - - enc := sqlserverflexbetaResGen.NewEncryptionValueNull() - if kVal, ok := resp.Encryption.GetKekKeyIdOk(); ok { - enc.KekKeyId = types.StringValue(kVal) - } - if kkVal, ok := resp.Encryption.GetKekKeyRingIdOk(); ok { - enc.KekKeyRingId = types.StringValue(kkVal) - } - if kkvVal, ok := resp.Encryption.GetKekKeyVersionOk(); ok { - enc.KekKeyVersion = types.StringValue(kkvVal) - } - if sa, ok := resp.Encryption.GetServiceAccountOk(); ok { - enc.ServiceAccount = types.StringValue(sa) - } - return enc - */ - return sqlserverflexbetaResGen.NewEncryptionValueNull() -} - -func toCreatePayload( - ctx context.Context, - model *dataSourceModel, -) (*sqlserverflexbeta.CreateUserRequestPayload, error) { - if model == nil { - return nil, fmt.Errorf("nil model") - } - - var roles []sqlserverflexbeta.UserRole - if !model.Roles.IsNull() && !model.Roles.IsUnknown() { - diags := model.Roles.ElementsAs(ctx, &roles, false) - if diags.HasError() { - return nil, fmt.Errorf("failed to convert roles: %v", diags) - } - } - - return &sqlserverflexbeta.CreateUserRequestPayload{ - DefaultDatabase: model.DefaultDatabase.ValueStringPointer(), - Username: model.Username.ValueStringPointer(), - Roles: &roles, - }, nil -} +//func toCreatePayload( +// ctx context.Context, +// model *resourceModel, +//) (*sqlserverflexbeta.CreateUserRequestPayload, error) { +// if model == nil { +// return nil, fmt.Errorf("nil model") +// } +// +// var roles []sqlserverflexbeta.UserRole +// if !model.Roles.IsNull() && !model.Roles.IsUnknown() { +// diags := model.Roles.ElementsAs(ctx, &roles, false) +// if diags.HasError() { +// return nil, fmt.Errorf("failed to convert roles: %v", diags) +// } +// } +// +// return &sqlserverflexbeta.CreateUserRequestPayload{ +// DefaultDatabase: model.DefaultDatabase.ValueStringPointer(), +// Username: model.Username.ValueStringPointer(), +// Roles: &roles, +// }, nil +//} diff --git a/stackit/internal/services/sqlserverflexbeta/user/resource.go b/stackit/internal/services/sqlserverflexbeta/user/resource.go index 40f78c7f..00920927 100644 --- a/stackit/internal/services/sqlserverflexbeta/user/resource.go +++ b/stackit/internal/services/sqlserverflexbeta/user/resource.go @@ -158,6 +158,18 @@ func (r *userResource) Create(ctx context.Context, req resource.CreateRequest, r ctx = tflog.SetField(ctx, "project_id", projectId) ctx = tflog.SetField(ctx, "region", region) + //payload, err := toCreatePayload(ctx, &data) + //if err != nil { + // core.LogAndAddError( + // ctx, + // &resp.Diagnostics, + // "Error creating User", + // fmt.Sprintf("Creating API payload: %v", err), + // ) + // return + //} + //payload = payload + // TODO: Create API call logic /* // Generate API request body from model diff --git a/stackit/provider.go b/stackit/provider.go index f05e204c..545d853e 100644 --- a/stackit/provider.go +++ b/stackit/provider.go @@ -21,12 +21,12 @@ import ( "github.com/stackitcloud/stackit-sdk-go/core/config" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/features" + postgresFlexAlphaDatabase "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/database" postgresFlexAlphaFlavor "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/flavor" postgresflexalphaFlavors "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/flavors" postgresFlexAlphaInstance "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/instance" postgresFlexAlphaUser "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/user" - sqlserverFlexBetaFlavor "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/flavor" sqlserverflexalphaDatabase "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/database" sqlserverFlexAlphaFlavor "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/flavor" @@ -34,6 +34,7 @@ import ( sqlserverFlexAlphaUser "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/user" sqlserverflexBetaDatabase "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/database" + sqlserverFlexBetaFlavor "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/flavor" sqlserverflexBetaInstance "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/instance" ) diff --git a/stackit/provider_acc_test.go b/stackit/provider_acc_test.go index 7045874b..208c5b25 100644 --- a/stackit/provider_acc_test.go +++ b/stackit/provider_acc_test.go @@ -1,5 +1,3 @@ -// Copyright (c) STACKIT - package stackit_test import ( @@ -7,17 +5,15 @@ import ( "fmt" "log/slog" "os" - "path" "regexp" - "runtime" "testing" + "github.com/hashicorp/terraform-plugin-testing/helper/acctest" "github.com/joho/godotenv" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/internal/testutils" "github.com/hashicorp/terraform-plugin-testing/config" - "github.com/hashicorp/terraform-plugin-testing/helper/acctest" "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/testutil" ) //go:embed testdata/provider-credentials.tf @@ -29,10 +25,7 @@ var providerInvalidAttribute string //go:embed testdata/provider-all-attributes.tf var providerValidAttributes string -var testConfigProviderCredentials = config.Variables{ - "project_id": config.StringVariable(testutil.ProjectId), - "name": config.StringVariable(fmt.Sprintf("tf-acc-prov%s", acctest.RandStringFromCharSet(3, acctest.CharSetAlphaNum))), -} +var testConfigProviderCredentials config.Variables func setup() { err := godotenv.Load() @@ -41,6 +34,18 @@ func setup() { return } slog.Info("loaded .env file") + + testConfigProviderCredentials = config.Variables{ + "project_id": config.StringVariable(os.Getenv("TF_ACC_PROJECT_ID")), + "region": config.StringVariable(os.Getenv("TF_ACC_REGION")), + "service_account_key_path": config.StringVariable(os.Getenv("TF_ACC_SERVICE_ACCOUNT_FILE")), + "name": config.StringVariable( + fmt.Sprintf( + "tf-acc-prov%s", + acctest.RandStringFromCharSet(3, acctest.CharSetAlphaNum), + ), + ), + } } func TestMain(m *testing.M) { @@ -50,94 +55,7 @@ func TestMain(m *testing.M) { os.Exit(code) } -// Helper function to obtain the home directory on different systems. -// Based on os.UserHomeDir(). -func getHomeEnvVariableName() string { - env := "HOME" - switch runtime.GOOS { - case "windows": - env = "USERPROFILE" - case "plan9": - env = "home" - } - return env -} - -// create temporary home and initialize the credentials file as well -func createTemporaryHome(createValidCredentialsFile bool, t *testing.T) string { - // create a temporary file - tempHome, err := os.MkdirTemp("", "tempHome") - if err != nil { - t.Fatalf("Failed to create temporary home directory: %v", err) - } - - // create credentials file in temp directory - stackitFolder := path.Join(tempHome, ".stackit") - if err := os.Mkdir(stackitFolder, 0o750); err != nil { - t.Fatalf("Failed to create stackit folder: %v", err) - } - - filePath := path.Join(stackitFolder, "credentials.json") - file, err := os.Create(filePath) - if err != nil { - t.Fatalf("Failed to create credentials file: %v", err) - } - defer func() { - if err := file.Close(); err != nil { - t.Fatalf("Error while closing the file: %v", err) - } - }() - - // Define content, default = invalid token - token := "foo_token" - if createValidCredentialsFile { - token = testutil.GetTestProjectServiceAccountToken("") - } - content := fmt.Sprintf(` - { - "STACKIT_SERVICE_ACCOUNT_TOKEN": "%s" - }`, token) - - if _, err = file.WriteString(content); err != nil { - t.Fatalf("Error writing to file: %v", err) - } - - return tempHome -} - -// Function to overwrite the home folder -func setTemporaryHome(tempHomePath string) { - env := getHomeEnvVariableName() - if err := os.Setenv(env, tempHomePath); err != nil { - fmt.Printf("Error setting temporary home directory %v", err) - } -} - -// cleanup the temporary home and reset the environment variable -func cleanupTemporaryHome(tempHomePath string, t *testing.T) { - if err := os.RemoveAll(tempHomePath); err != nil { - t.Fatalf("Error cleaning up temporary folder: %v", err) - } - originalHomeDir, err := os.UserHomeDir() - if err != nil { - t.Fatalf("Failed to restore home directory back to normal: %v", err) - } - // revert back to original home folder - env := getHomeEnvVariableName() - if err := os.Setenv(env, originalHomeDir); err != nil { - fmt.Printf("Error resetting temporary home directory %v", err) - } -} - -func getServiceAccountToken() (string, error) { - token, set := os.LookupEnv("TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_TOKEN") - if !set || token == "" { - return "", fmt.Errorf("Token not set, please set TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_TOKEN to a valid token to perform tests") - } - return token, nil -} - -func TestAccEnvVarTokenValid(t *testing.T) { +func TestAccEnvVarServiceAccountPathValid(t *testing.T) { // Check if acceptance tests should be run if v := os.Getenv(resource.EnvTfAcc); v == "" { t.Skipf( @@ -146,20 +64,14 @@ func TestAccEnvVarTokenValid(t *testing.T) { return } - token, err := getServiceAccountToken() - if err != nil { - t.Fatalf("Can't get token: %v", err) - } - - t.Setenv("STACKIT_CREDENTIALS_PATH", "") - t.Setenv("STACKIT_SERVICE_ACCOUNT_TOKEN", token) - tempHomeFolder := createTemporaryHome(false, t) - defer cleanupTemporaryHome(tempHomeFolder, t) + // t.Setenv("STACKIT_CREDENTIALS_PATH", "") + tempHomeFolder := testutils.CreateTemporaryHome(true, t) + defer testutils.CleanupTemporaryHome(tempHomeFolder, t) resource.Test(t, resource.TestCase{ - ProtoV6ProviderFactories: testutil.TestAccProtoV6ProviderFactories, + ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories, Steps: []resource.TestStep{ { - PreConfig: func() { setTemporaryHome(tempHomeFolder) }, + PreConfig: func() { testutils.SetTemporaryHome(tempHomeFolder) }, ConfigVariables: testConfigProviderCredentials, Config: providerCredentialConfig, }, @@ -167,16 +79,16 @@ func TestAccEnvVarTokenValid(t *testing.T) { }) } -func TestAccEnvVarTokenInvalid(t *testing.T) { +func TestAccEnvVarServiceAccountPathInvalid(t *testing.T) { + t.Skip("needs refactoring") t.Setenv("STACKIT_CREDENTIALS_PATH", "") - t.Setenv("STACKIT_SERVICE_ACCOUNT_TOKEN", "foo") - tempHomeFolder := createTemporaryHome(false, t) - defer cleanupTemporaryHome(tempHomeFolder, t) + tempHomeFolder := testutils.CreateTemporaryHome(false, t) + defer testutils.CleanupTemporaryHome(tempHomeFolder, t) resource.Test(t, resource.TestCase{ - ProtoV6ProviderFactories: testutil.TestAccProtoV6ProviderFactories, + ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories, Steps: []resource.TestStep{ { - PreConfig: func() { setTemporaryHome(tempHomeFolder) }, + PreConfig: func() { testutils.SetTemporaryHome(tempHomeFolder) }, ConfigVariables: testConfigProviderCredentials, Config: providerCredentialConfig, ExpectError: regexp.MustCompile(`undefined response type, status code 401`), @@ -186,15 +98,15 @@ func TestAccEnvVarTokenInvalid(t *testing.T) { } func TestAccCredentialsFileValid(t *testing.T) { + t.Skip("needs refactoring") t.Setenv("STACKIT_CREDENTIALS_PATH", "") - t.Setenv("STACKIT_SERVICE_ACCOUNT_TOKEN", "") - tempHomeFolder := createTemporaryHome(true, t) - defer cleanupTemporaryHome(tempHomeFolder, t) + tempHomeFolder := testutils.CreateTemporaryHome(true, t) + defer testutils.CleanupTemporaryHome(tempHomeFolder, t) resource.Test(t, resource.TestCase{ - ProtoV6ProviderFactories: testutil.TestAccProtoV6ProviderFactories, + ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories, Steps: []resource.TestStep{ { - PreConfig: func() { setTemporaryHome(tempHomeFolder) }, + PreConfig: func() { testutils.SetTemporaryHome(tempHomeFolder) }, ConfigVariables: testConfigProviderCredentials, Config: providerCredentialConfig, }, @@ -203,15 +115,15 @@ func TestAccCredentialsFileValid(t *testing.T) { } func TestAccCredentialsFileInvalid(t *testing.T) { + t.Skip("needs refactoring") t.Setenv("STACKIT_CREDENTIALS_PATH", "") - t.Setenv("STACKIT_SERVICE_ACCOUNT_TOKEN", "") - tempHomeFolder := createTemporaryHome(false, t) - defer cleanupTemporaryHome(tempHomeFolder, t) + tempHomeFolder := testutils.CreateTemporaryHome(false, t) + defer testutils.CleanupTemporaryHome(tempHomeFolder, t) resource.Test(t, resource.TestCase{ - ProtoV6ProviderFactories: testutil.TestAccProtoV6ProviderFactories, + ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories, Steps: []resource.TestStep{ { - PreConfig: func() { setTemporaryHome(tempHomeFolder) }, + PreConfig: func() { testutils.SetTemporaryHome(tempHomeFolder) }, ConfigVariables: testConfigProviderCredentials, Config: providerCredentialConfig, ExpectError: regexp.MustCompile(`Jwt is not in(\r\n|\r|\n)the form of Header.Payload.Signature`), @@ -221,6 +133,7 @@ func TestAccCredentialsFileInvalid(t *testing.T) { } func TestAccProviderConfigureValidValues(t *testing.T) { + t.Skip("needs refactoring") // Check if acceptance tests should be run if v := os.Getenv(resource.EnvTfAcc); v == "" { t.Skipf( @@ -228,18 +141,11 @@ func TestAccProviderConfigureValidValues(t *testing.T) { resource.EnvTfAcc) return } - // use service account token for these tests - token, err := getServiceAccountToken() - if err != nil { - t.Fatalf("Can't get token: %v", err) - } - t.Setenv("STACKIT_CREDENTIALS_PATH", "") - t.Setenv("STACKIT_SERVICE_ACCOUNT_TOKEN", token) - tempHomeFolder := createTemporaryHome(true, t) - defer cleanupTemporaryHome(tempHomeFolder, t) + tempHomeFolder := testutils.CreateTemporaryHome(true, t) + defer testutils.CleanupTemporaryHome(tempHomeFolder, t) resource.Test(t, resource.TestCase{ - ProtoV6ProviderFactories: testutil.TestAccProtoV6ProviderFactories, + ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories, Steps: []resource.TestStep{ { // valid provider attributes ConfigVariables: testConfigProviderCredentials, @@ -257,18 +163,12 @@ func TestAccProviderConfigureAnInvalidValue(t *testing.T) { resource.EnvTfAcc) return } - // use service account token for these tests - token, err := getServiceAccountToken() - if err != nil { - t.Fatalf("Can't get token: %v", err) - } t.Setenv("STACKIT_CREDENTIALS_PATH", "") - t.Setenv("STACKIT_SERVICE_ACCOUNT_TOKEN", token) - tempHomeFolder := createTemporaryHome(true, t) - defer cleanupTemporaryHome(tempHomeFolder, t) + tempHomeFolder := testutils.CreateTemporaryHome(true, t) + defer testutils.CleanupTemporaryHome(tempHomeFolder, t) resource.Test(t, resource.TestCase{ - ProtoV6ProviderFactories: testutil.TestAccProtoV6ProviderFactories, + ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories, Steps: []resource.TestStep{ { // invalid test attribute should throw an error ConfigVariables: testConfigProviderCredentials, diff --git a/stackit/testdata/provider-all-attributes.tf b/stackit/testdata/provider-all-attributes.tf index 930fc553..9ec02936 100644 --- a/stackit/testdata/provider-all-attributes.tf +++ b/stackit/testdata/provider-all-attributes.tf @@ -1,8 +1,8 @@ variable "project_id" {} -variable "name" {} +variable "region" {} -provider "stackit" { +provider "stackitprivatepreview" { default_region = "eu01" credentials_path = "~/.stackit/credentials.json" service_account_token = "" @@ -36,7 +36,11 @@ provider "stackit" { enable_beta_resources = "true" } -resource "stackit_network" "network" { - name = var.name - project_id = var.project_id +data "stackitprivatepreview_postgresflexalpha_flavor" "flavor" { + project_id = var.project_id + region = var.region + cpu = 2 + ram = 4 + node_type = "Single" + storage_class = "premium-perf2-stackit" } diff --git a/stackit/testdata/provider-credentials.tf b/stackit/testdata/provider-credentials.tf index a0ed79f4..d348939e 100644 --- a/stackit/testdata/provider-credentials.tf +++ b/stackit/testdata/provider-credentials.tf @@ -1,11 +1,18 @@ variable "project_id" {} -variable "name" {} +variable "region" {} -provider "stackit" { +variable "service_account_key_path" {} + +provider "stackitprivatepreview" { + service_account_key_path = var.service_account_key_path } -resource "stackit_network" "network" { - name = var.name - project_id = var.project_id -} \ No newline at end of file +data "stackitprivatepreview_postgresflexalpha_flavor" "flavor" { + project_id = var.project_id + region = var.region + cpu = 2 + ram = 4 + node_type = "Single" + storage_class = "premium-perf2-stackit" +} diff --git a/stackit/testdata/provider-invalid-attribute.tf b/stackit/testdata/provider-invalid-attribute.tf index 524610e6..1c9d1729 100644 --- a/stackit/testdata/provider-invalid-attribute.tf +++ b/stackit/testdata/provider-invalid-attribute.tf @@ -1,12 +1,16 @@ variable "project_id" {} -variable "name" {} +variable "region" {} -provider "stackit" { +provider "stackitprivatepreview" { test = "test" } -resource "stackit_network" "network" { - name = var.name - project_id = var.project_id -} \ No newline at end of file +data "stackitprivatepreview_postgresflexalpha_flavor" "flavor" { + project_id = var.project_id + region = var.region + cpu = 2 + ram = 4 + node_type = "Single" + storage_class = "premium-perf2-stackit" +} From 399e8ccb0cab6b044439b81a495582198fcb6dbd Mon Sep 17 00:00:00 2001 From: Andre Harms Date: Wed, 11 Feb 2026 09:03:31 +0000 Subject: [PATCH 14/41] feat: update sql server flex configuration for user and database (#46) ## Description relates to #1234 ## Checklist - [ ] Issue was linked above - [ ] Code format was applied: `make fmt` - [ ] Examples were added / adjusted (see `examples/` directory) - [x] Docs are up-to-date: `make generate-docs` (will be checked by CI) - [ ] Unit tests got implemented or updated - [ ] Acceptance tests got implemented or updated (see e.g. [here](https://github.com/stackitcloud/terraform-provider-stackit/blob/f5f99d170996b208672ae684b6da53420e369563/stackit/internal/services/dns/dns_acc_test.go)) - [x] Unit tests are passing: `make test` (will be checked by CI) - [x] No linter issues: `make lint` (will be checked by CI) Reviewed-on: https://tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pulls/46 Reviewed-by: Marcel_Henselin Co-authored-by: Andre Harms Co-committed-by: Andre Harms --- .../sqlserverflexalpha/database/datasource.go | 95 +- .../sqlserverflexalpha/database/mapper.go | 97 ++ .../database/mapper_test.go | 227 ++++ .../sqlserverflexalpha/database/resource.go | 288 ++++- .../sqlserverflex_acc_test.go | 1044 +++++++++++------ .../sqlserverflexalpha/user/datasource.go | 66 +- .../user/datasource_test.go | 147 --- .../sqlserverflexalpha/user/mapper.go | 179 +++ .../sqlserverflexalpha/user/mapper_test.go | 525 +++++++++ .../sqlserverflexalpha/user/resource.go | 133 +-- .../sqlserverflexalpha/user/resource_test.go | 388 ------ .../sqlserverflexbeta/database/datasource.go | 69 +- .../sqlserverflexbeta/database/mapper.go | 97 ++ .../sqlserverflexbeta/database/mapper_test.go | 227 ++++ .../sqlserverflexbeta/database/resource.go | 251 ++-- .../sqlserverflexbeta/user/datasource.go | 78 +- .../sqlserverflexbeta/user/functions.go | 98 -- .../services/sqlserverflexbeta/user/mapper.go | 179 +++ .../sqlserverflexbeta/user/mapper_test.go | 527 +++++++++ .../sqlserverflexbeta/user/resource.go | 581 ++++----- .../services/sqlserverflexbeta/utils/util.go | 47 + .../sqlserverflexbeta/utils/util_test.go | 97 ++ .../internal/wait/sqlserverflexbeta/wait.go | 283 +++-- 23 files changed, 3959 insertions(+), 1764 deletions(-) create mode 100644 stackit/internal/services/sqlserverflexalpha/database/mapper.go create mode 100644 stackit/internal/services/sqlserverflexalpha/database/mapper_test.go delete mode 100644 stackit/internal/services/sqlserverflexalpha/user/datasource_test.go create mode 100644 stackit/internal/services/sqlserverflexalpha/user/mapper.go create mode 100644 stackit/internal/services/sqlserverflexalpha/user/mapper_test.go delete mode 100644 stackit/internal/services/sqlserverflexalpha/user/resource_test.go create mode 100644 stackit/internal/services/sqlserverflexbeta/database/mapper.go create mode 100644 stackit/internal/services/sqlserverflexbeta/database/mapper_test.go delete mode 100644 stackit/internal/services/sqlserverflexbeta/user/functions.go create mode 100644 stackit/internal/services/sqlserverflexbeta/user/mapper.go create mode 100644 stackit/internal/services/sqlserverflexbeta/user/mapper_test.go create mode 100644 stackit/internal/services/sqlserverflexbeta/utils/util.go create mode 100644 stackit/internal/services/sqlserverflexbeta/utils/util_test.go diff --git a/stackit/internal/services/sqlserverflexalpha/database/datasource.go b/stackit/internal/services/sqlserverflexalpha/database/datasource.go index 3c201b5a..176f3d35 100644 --- a/stackit/internal/services/sqlserverflexalpha/database/datasource.go +++ b/stackit/internal/services/sqlserverflexalpha/database/datasource.go @@ -2,9 +2,12 @@ package sqlserverflexalpha import ( "context" + "fmt" + "net/http" "github.com/hashicorp/terraform-plugin-framework/datasource" "github.com/hashicorp/terraform-plugin-framework/datasource/schema" + "github.com/hashicorp/terraform-plugin-framework/diag" "github.com/hashicorp/terraform-plugin-framework/types" "github.com/hashicorp/terraform-plugin-log/tflog" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha" @@ -12,6 +15,7 @@ import ( "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" sqlserverflexalphaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/database/datasources_gen" sqlserverflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/utils" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" ) // dataSourceModel maps the data source schema data. @@ -22,6 +26,7 @@ type dataSourceModel struct { var _ datasource.DataSource = (*databaseDataSource)(nil) +// NewDatabaseDataSource creates a new database data source. func NewDatabaseDataSource() datasource.DataSource { return &databaseDataSource{} } @@ -31,6 +36,7 @@ type databaseDataSource struct { providerData core.ProviderData } +// Metadata returns the data source type name. func (d *databaseDataSource) Metadata( _ context.Context, req datasource.MetadataRequest, @@ -39,6 +45,7 @@ func (d *databaseDataSource) Metadata( resp.TypeName = req.ProviderTypeName + "_sqlserverflexalpha_database" } +// Schema defines the data source schema. func (d *databaseDataSource) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) { s := sqlserverflexalphaGen.DatabaseDataSourceSchema(ctx) s.Attributes["id"] = schema.StringAttribute{ @@ -67,24 +74,92 @@ func (d *databaseDataSource) Configure( return } d.client = apiClient - tflog.Info(ctx, "SQL SERVER Flex database client configured") + tflog.Info(ctx, "SQL SERVER Flex alpha database client configured") } +// Read retrieves the resource's state from the API. func (d *databaseDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { - var data dataSourceModel - - // Read Terraform configuration data into the model - resp.Diagnostics.Append(req.Config.Get(ctx, &data)...) + var model dataSourceModel + diags := req.Config.Get(ctx, &model) + resp.Diagnostics.Append(diags...) if resp.Diagnostics.HasError() { return } - // Todo: Read API call logic + ctx = core.InitProviderContext(ctx) - // Example data value setting - // data.Id = types.StringValue("example-id") + // Extract identifiers from the plan + projectId := model.ProjectId.ValueString() + instanceId := model.InstanceId.ValueString() + region := d.providerData.GetRegionWithOverride(model.Region) + databaseName := model.DatabaseName.ValueString() - // Save data into Terraform state - resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) + ctx = tflog.SetField(ctx, "project_id", projectId) + ctx = tflog.SetField(ctx, "instance_id", instanceId) + ctx = tflog.SetField(ctx, "region", region) + ctx = tflog.SetField(ctx, "database_name", databaseName) + + // Fetch database from the API + databaseResp, err := d.client.GetDatabaseRequest(ctx, projectId, region, instanceId, databaseName).Execute() + + if resp.Diagnostics.HasError() { + return + } + if err != nil { + handleReadError(ctx, &resp.Diagnostics, err, projectId, instanceId) + resp.State.RemoveResource(ctx) + return + } + + ctx = core.LogResponse(ctx) + + // Map response body to schema and populate Computed attribute values + err = mapFields(databaseResp, &model, region) + if err != nil { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + "Error reading database", + fmt.Sprintf("Processing API payload: %v", err), + ) + return + } + + // Set refreshed state + diags = resp.State.Set(ctx, model) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + + tflog.Info(ctx, "SQL Server Flex alpha database read") +} + +// handleReadError centralizes API error handling for the Read operation. +func handleReadError(ctx context.Context, diags *diag.Diagnostics, err error, projectId, instanceId string) { + utils.LogError( + ctx, + diags, + err, + "Reading database", + fmt.Sprintf( + "Could not retrieve database for instance %q in project %q.", + instanceId, + projectId, + ), + map[int]string{ + http.StatusBadRequest: fmt.Sprintf( + "Invalid request parameters for project %q and instance %q.", + projectId, + instanceId, + ), + http.StatusNotFound: fmt.Sprintf( + "Database, instance %q, or project %q not found.", + instanceId, + projectId, + ), + http.StatusForbidden: fmt.Sprintf("Forbidden access to project %q.", projectId), + }, + ) } diff --git a/stackit/internal/services/sqlserverflexalpha/database/mapper.go b/stackit/internal/services/sqlserverflexalpha/database/mapper.go new file mode 100644 index 00000000..05376158 --- /dev/null +++ b/stackit/internal/services/sqlserverflexalpha/database/mapper.go @@ -0,0 +1,97 @@ +package sqlserverflexalpha + +import ( + "fmt" + "strings" + + "github.com/hashicorp/terraform-plugin-framework/types" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" +) + +// mapFields maps fields from a ListDatabase API response to a resourceModel for the data source. +func mapFields(source *sqlserverflexalpha.GetDatabaseResponse, model *dataSourceModel, region string) error { + if source == nil { + return fmt.Errorf("response is nil") + } + if source.Id == nil || *source.Id == 0 { + return fmt.Errorf("id not present") + } + if model == nil { + return fmt.Errorf("model given is nil") + } + + var databaseId int64 + if model.Id.ValueInt64() != 0 { + databaseId = model.Id.ValueInt64() + } else if source.Id != nil { + databaseId = *source.Id + } else { + return fmt.Errorf("database id not present") + } + + model.Id = types.Int64Value(databaseId) + model.DatabaseName = types.StringValue(source.GetName()) + model.Name = types.StringValue(source.GetName()) + model.Owner = types.StringValue(strings.Trim(source.GetOwner(), "\"")) + model.Region = types.StringValue(region) + model.ProjectId = types.StringValue(model.ProjectId.ValueString()) + model.InstanceId = types.StringValue(model.InstanceId.ValueString()) + model.CompatibilityLevel = types.Int64Value(source.GetCompatibilityLevel()) + model.CollationName = types.StringValue(source.GetCollationName()) + + model.TerraformID = utils.BuildInternalTerraformId( + model.ProjectId.ValueString(), + region, + model.InstanceId.ValueString(), + model.DatabaseName.ValueString(), + ) + + return nil +} + +// mapResourceFields maps fields from a ListDatabase API response to a resourceModel for the resource. +func mapResourceFields(source *sqlserverflexalpha.GetDatabaseResponse, model *resourceModel, region string) error { + if source == nil { + return fmt.Errorf("response is nil") + } + if source.Id == nil || *source.Id == 0 { + return fmt.Errorf("id not present") + } + if model == nil { + return fmt.Errorf("model input is nil") + } + + var databaseId int64 + if model.Id.ValueInt64() != 0 { + databaseId = model.Id.ValueInt64() + } else if source.Id != nil { + databaseId = *source.Id + } else { + return fmt.Errorf("database id not present") + } + + model.Id = types.Int64Value(databaseId) + model.DatabaseName = types.StringValue(source.GetName()) + model.Name = types.StringValue(source.GetName()) + model.Owner = types.StringValue(strings.Trim(source.GetOwner(), "\"")) + model.Region = types.StringValue(region) + model.ProjectId = types.StringValue(model.ProjectId.ValueString()) + model.InstanceId = types.StringValue(model.InstanceId.ValueString()) + + return nil +} + +// toCreatePayload converts the resource model to an API create payload. +func toCreatePayload(model *resourceModel) (*sqlserverflexalpha.CreateDatabaseRequestPayload, error) { + if model == nil { + return nil, fmt.Errorf("nil model") + } + + return &sqlserverflexalpha.CreateDatabaseRequestPayload{ + Name: model.Name.ValueStringPointer(), + Owner: model.Owner.ValueStringPointer(), + Collation: model.Collation.ValueStringPointer(), + Compatibility: model.Compatibility.ValueInt64Pointer(), + }, nil +} diff --git a/stackit/internal/services/sqlserverflexalpha/database/mapper_test.go b/stackit/internal/services/sqlserverflexalpha/database/mapper_test.go new file mode 100644 index 00000000..992a3878 --- /dev/null +++ b/stackit/internal/services/sqlserverflexalpha/database/mapper_test.go @@ -0,0 +1,227 @@ +package sqlserverflexalpha + +import ( + "testing" + + "github.com/google/go-cmp/cmp" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/stackitcloud/stackit-sdk-go/core/utils" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha" + datasource "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/database/datasources_gen" +) + +func TestMapFields(t *testing.T) { + type given struct { + source *sqlserverflexalpha.GetDatabaseResponse + model *dataSourceModel + region string + } + type expected struct { + model *dataSourceModel + err bool + } + + testcases := []struct { + name string + given given + expected expected + }{ + { + name: "should map fields correctly", + given: given{ + source: &sqlserverflexalpha.GetDatabaseResponse{ + Id: utils.Ptr(int64(1)), + Name: utils.Ptr("my-db"), + CollationName: utils.Ptr("collation"), + CompatibilityLevel: utils.Ptr(int64(150)), + Owner: utils.Ptr("\"my-owner\""), + }, + model: &dataSourceModel{ + DatabaseModel: datasource.DatabaseModel{ + ProjectId: types.StringValue("my-project"), + InstanceId: types.StringValue("my-instance"), + }, + }, + region: "eu01", + }, + expected: expected{ + model: &dataSourceModel{ + DatabaseModel: datasource.DatabaseModel{ + Id: types.Int64Value(1), + Name: types.StringValue("my-db"), + DatabaseName: types.StringValue("my-db"), + Owner: types.StringValue("my-owner"), + Region: types.StringValue("eu01"), + InstanceId: types.StringValue("my-instance"), + ProjectId: types.StringValue("my-project"), + CompatibilityLevel: types.Int64Value(150), + CollationName: types.StringValue("collation"), + }, + TerraformID: types.StringValue("my-project,eu01,my-instance,my-db"), + }, + }, + }, + { + name: "should fail on nil source", + given: given{ + source: nil, + model: &dataSourceModel{}, + }, + expected: expected{err: true}, + }, + { + name: "should fail on nil source ID", + given: given{ + source: &sqlserverflexalpha.GetDatabaseResponse{Id: nil}, + model: &dataSourceModel{}, + }, + expected: expected{err: true}, + }, + { + name: "should fail on nil model", + given: given{ + source: &sqlserverflexalpha.GetDatabaseResponse{Id: utils.Ptr(int64(1))}, + model: nil, + }, + expected: expected{err: true}, + }, + } + + for _, tc := range testcases { + t.Run( + tc.name, func(t *testing.T) { + err := mapFields(tc.given.source, tc.given.model, tc.given.region) + if (err != nil) != tc.expected.err { + t.Fatalf("expected error: %v, got: %v", tc.expected.err, err) + } + if err == nil { + if diff := cmp.Diff(tc.expected.model, tc.given.model); diff != "" { + t.Errorf("model mismatch (-want +got):\n%s", diff) + } + } + }, + ) + } +} + +func TestMapResourceFields(t *testing.T) { + type given struct { + source *sqlserverflexalpha.GetDatabaseResponse + model *resourceModel + region string + } + type expected struct { + model *resourceModel + err bool + } + + testcases := []struct { + name string + given given + expected expected + }{ + { + name: "should map fields correctly", + given: given{ + source: &sqlserverflexalpha.GetDatabaseResponse{ + Id: utils.Ptr(int64(1)), + Name: utils.Ptr("my-db"), + Owner: utils.Ptr("\"my-owner\""), + }, + model: &resourceModel{ + ProjectId: types.StringValue("my-project"), + InstanceId: types.StringValue("my-instance"), + }, + region: "eu01", + }, + expected: expected{ + model: &resourceModel{ + Id: types.Int64Value(1), + Name: types.StringValue("my-db"), + DatabaseName: types.StringValue("my-db"), + InstanceId: types.StringValue("my-instance"), + ProjectId: types.StringValue("my-project"), + Region: types.StringValue("eu01"), + Owner: types.StringValue("my-owner"), + }, + }, + }, + { + name: "should fail on nil source", + given: given{ + source: nil, + model: &resourceModel{}, + }, + expected: expected{err: true}, + }, + } + + for _, tc := range testcases { + t.Run( + tc.name, func(t *testing.T) { + err := mapResourceFields(tc.given.source, tc.given.model, tc.given.region) + if (err != nil) != tc.expected.err { + t.Fatalf("expected error: %v, got: %v", tc.expected.err, err) + } + if err == nil { + if diff := cmp.Diff(tc.expected.model, tc.given.model); diff != "" { + t.Errorf("model mismatch (-want +got):\n%s", diff) + } + } + }, + ) + } +} + +func TestToCreatePayload(t *testing.T) { + type given struct { + model *resourceModel + } + type expected struct { + payload *sqlserverflexalpha.CreateDatabaseRequestPayload + err bool + } + + testcases := []struct { + name string + given given + expected expected + }{ + { + name: "should convert model to payload", + given: given{ + model: &resourceModel{ + Name: types.StringValue("my-db"), + Owner: types.StringValue("my-owner"), + }, + }, + expected: expected{ + payload: &sqlserverflexalpha.CreateDatabaseRequestPayload{ + Name: utils.Ptr("my-db"), + Owner: utils.Ptr("my-owner"), + }, + }, + }, + { + name: "should fail on nil model", + given: given{model: nil}, + expected: expected{err: true}, + }, + } + + for _, tc := range testcases { + t.Run( + tc.name, func(t *testing.T) { + actual, err := toCreatePayload(tc.given.model) + if (err != nil) != tc.expected.err { + t.Fatalf("expected error: %v, got: %v", tc.expected.err, err) + } + if err == nil { + if diff := cmp.Diff(tc.expected.payload, actual); diff != "" { + t.Errorf("payload mismatch (-want +got):\n%s", diff) + } + } + }, + ) + } +} diff --git a/stackit/internal/services/sqlserverflexalpha/database/resource.go b/stackit/internal/services/sqlserverflexalpha/database/resource.go index f3dc6816..0f4ce098 100644 --- a/stackit/internal/services/sqlserverflexalpha/database/resource.go +++ b/stackit/internal/services/sqlserverflexalpha/database/resource.go @@ -3,7 +3,9 @@ package sqlserverflexalpha import ( "context" _ "embed" + "errors" "fmt" + "net/http" "strconv" "strings" @@ -13,6 +15,7 @@ import ( "github.com/hashicorp/terraform-plugin-framework/types" "github.com/hashicorp/terraform-plugin-log/tflog" "github.com/stackitcloud/stackit-sdk-go/core/config" + "github.com/stackitcloud/stackit-sdk-go/core/oapierror" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" @@ -28,6 +31,13 @@ var ( _ resource.ResourceWithImportState = &databaseResource{} _ resource.ResourceWithModifyPlan = &databaseResource{} _ resource.ResourceWithIdentity = &databaseResource{} + + // Define errors + errDatabaseNotFound = errors.New("database not found") + + // Error message constants + extractErrorSummary = "extracting failed" + extractErrorMessage = "Extracting identity data: %v" ) func NewDatabaseResource() resource.Resource { @@ -137,73 +147,230 @@ func (r *databaseResource) Configure( } func (r *databaseResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) { - var data sqlserverflexalphaGen.DatabaseModel - - // Read Terraform plan data into the model - resp.Diagnostics.Append(req.Plan.Get(ctx, &data)...) + var model resourceModel + diags := req.Plan.Get(ctx, &model) + resp.Diagnostics.Append(diags...) if resp.Diagnostics.HasError() { return } - // TODO: Create API call logic + // Read identity data + var identityData DatabaseResourceIdentityModel + resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...) + if resp.Diagnostics.HasError() { + return + } - // Example data value setting - // data.DatabaseId = types.StringValue("id-from-response") + ctx = core.InitProviderContext(ctx) - // Save data into Terraform state - resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) + projectId := identityData.ProjectID.ValueString() + region := identityData.ProjectID.ValueString() + instanceId := identityData.InstanceID.ValueString() + + ctx = tflog.SetField(ctx, "project_id", projectId) + ctx = tflog.SetField(ctx, "instance_id", instanceId) + ctx = tflog.SetField(ctx, "region", region) + + // Generate API request body from model + payload, err := toCreatePayload(&model) + if err != nil { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + "Error creating database", + fmt.Sprintf("Creating API payload: %v", err), + ) + return + } + // Create new database + databaseResp, err := r.client.CreateDatabaseRequest( + ctx, + projectId, + region, + instanceId, + ).CreateDatabaseRequestPayload(*payload).Execute() + if err != nil { + core.LogAndAddError(ctx, &resp.Diagnostics, "Error creating database", fmt.Sprintf("Calling API: %v", err)) + return + } + + ctx = core.LogResponse(ctx) + + if databaseResp == nil || databaseResp.Id == nil { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + "Error creating database", + "API didn't return database Id. A database might have been created", + ) + return + } + + databaseId := *databaseResp.Id + databaseName := model.DatabaseName.String() + + ctx = tflog.SetField(ctx, "database_id", databaseId) + ctx = tflog.SetField(ctx, "database_name", databaseName) + + ctx = core.LogResponse(ctx) + + database, err := r.client.GetDatabaseRequest(ctx, projectId, region, instanceId, databaseName).Execute() + if err != nil { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + "Error creating database", + fmt.Sprintf("Getting database details after creation: %v", err), + ) + return + } + + // Map response body to schema + err = mapResourceFields(database, &model, region) + if err != nil { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + "Error creating database", + fmt.Sprintf("Processing API payload: %v", err), + ) + return + } + + // Set data returned by API in identity + identity := DatabaseResourceIdentityModel{ + ProjectID: types.StringValue(projectId), + Region: types.StringValue(region), + InstanceID: types.StringValue(instanceId), + DatabaseName: types.StringValue(databaseName), + } + resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...) + if resp.Diagnostics.HasError() { + return + } + + // Set state to fully populated data + resp.Diagnostics.Append(resp.State.Set(ctx, model)...) + if resp.Diagnostics.HasError() { + return + } tflog.Info(ctx, "sqlserverflexalpha.Database created") } func (r *databaseResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) { - var data resourceModel - - // Read Terraform prior state data into the model - resp.Diagnostics.Append(req.State.Get(ctx, &data)...) - + var model resourceModel + diags := req.State.Get(ctx, &model) + resp.Diagnostics.Append(diags...) if resp.Diagnostics.HasError() { return } - // Todo: Read API call logic + // Read identity data + var identityData DatabaseResourceIdentityModel + resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...) + if resp.Diagnostics.HasError() { + return + } - // Save updated data into Terraform state - resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) + ctx = core.InitProviderContext(ctx) + + projectId, instanceId, region, databaseName, errExt := r.extractIdentityData(model, identityData) + if errExt != nil { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + extractErrorSummary, + fmt.Sprintf(extractErrorMessage, errExt), + ) + } + + ctx = tflog.SetField(ctx, "project_id", projectId) + ctx = tflog.SetField(ctx, "instance_id", instanceId) + ctx = tflog.SetField(ctx, "region", region) + ctx = tflog.SetField(ctx, "database_name", databaseName) + + databaseResp, err := r.client.GetDatabaseRequest(ctx, projectId, region, instanceId, databaseName).Execute() + if err != nil { + oapiErr, ok := err.(*oapierror.GenericOpenAPIError) //nolint:errorlint //complaining that error.As should be used to catch wrapped errors, but this error should not be wrapped + if (ok && oapiErr.StatusCode == http.StatusNotFound) || errors.Is(err, errDatabaseNotFound) { + resp.State.RemoveResource(ctx) + return + } + core.LogAndAddError(ctx, &resp.Diagnostics, "Error reading database", fmt.Sprintf("Calling API: %v", err)) + return + } + + ctx = core.LogResponse(ctx) + + // Map response body to schema + err = mapResourceFields(databaseResp, &model, region) + if err != nil { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + "Error reading database", + fmt.Sprintf("Processing API payload: %v", err), + ) + return + } + + // Set refreshed state + diags = resp.State.Set(ctx, model) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } tflog.Info(ctx, "sqlserverflexalpha.Database read") } -func (r *databaseResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) { - var data resourceModel - - // Read Terraform plan data into the model - resp.Diagnostics.Append(req.Plan.Get(ctx, &data)...) - - if resp.Diagnostics.HasError() { - return - } - - // Todo: Update API call logic - - // Save updated data into Terraform state - resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) - - tflog.Info(ctx, "sqlserverflexalpha.Database updated") +func (r *databaseResource) Update(ctx context.Context, _ resource.UpdateRequest, resp *resource.UpdateResponse) { + // TODO: Check update api endpoint - not available at the moment, so return an error for now + core.LogAndAddError(ctx, &resp.Diagnostics, "Error updating database", "Database can't be updated") } func (r *databaseResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) { - var data sqlserverflexalphaGen.DatabaseModel - - // Read Terraform prior state data into the model - resp.Diagnostics.Append(req.State.Get(ctx, &data)...) - + // nolint:gocritic // function signature required by Terraform + var model resourceModel + diags := req.State.Get(ctx, &model) + resp.Diagnostics.Append(diags...) if resp.Diagnostics.HasError() { return } - // Todo: Delete API call logic + // Read identity data + var identityData DatabaseResourceIdentityModel + resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...) + if resp.Diagnostics.HasError() { + return + } + + ctx = core.InitProviderContext(ctx) + + projectId, instanceId, region, databaseName, errExt := r.extractIdentityData(model, identityData) + if errExt != nil { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + extractErrorSummary, + fmt.Sprintf(extractErrorMessage, errExt), + ) + } + + ctx = tflog.SetField(ctx, "project_id", projectId) + ctx = tflog.SetField(ctx, "instance_id", instanceId) + ctx = tflog.SetField(ctx, "region", region) + ctx = tflog.SetField(ctx, "database_name", databaseName) + + // Delete existing record set + err := r.client.DeleteDatabaseRequestExecute(ctx, projectId, region, instanceId, databaseName) + if err != nil { + core.LogAndAddError(ctx, &resp.Diagnostics, "Error deleting database", fmt.Sprintf("Calling API: %v", err)) + } + + ctx = core.LogResponse(ctx) tflog.Info(ctx, "sqlserverflexalpha.Database deleted") } @@ -312,3 +479,46 @@ func (r *databaseResource) ImportState( tflog.Info(ctx, "Sqlserverflexalpha database state imported") } + +// extractIdentityData extracts essential identifiers from the resource model, falling back to the identity model. +func (r *databaseResource) extractIdentityData( + model resourceModel, + identity DatabaseResourceIdentityModel, +) (projectId, region, instanceId, databaseName string, err error) { + if !model.DatabaseName.IsNull() && !model.DatabaseName.IsUnknown() { + databaseName = model.DatabaseName.ValueString() + } else { + if identity.DatabaseName.IsNull() || identity.DatabaseName.IsUnknown() { + return "", "", "", "", fmt.Errorf("database_name not found in config") + } + databaseName = identity.DatabaseName.ValueString() + } + + if !model.ProjectId.IsNull() && !model.ProjectId.IsUnknown() { + projectId = model.ProjectId.ValueString() + } else { + if identity.ProjectID.IsNull() || identity.ProjectID.IsUnknown() { + return "", "", "", "", fmt.Errorf("project_id not found in config") + } + projectId = identity.ProjectID.ValueString() + } + + if !model.Region.IsNull() && !model.Region.IsUnknown() { + region = r.providerData.GetRegionWithOverride(model.Region) + } else { + if identity.Region.IsNull() || identity.Region.IsUnknown() { + return "", "", "", "", fmt.Errorf("region not found in config") + } + region = r.providerData.GetRegionWithOverride(identity.Region) + } + + if !model.InstanceId.IsNull() && !model.InstanceId.IsUnknown() { + instanceId = model.InstanceId.ValueString() + } else { + if identity.InstanceID.IsNull() || identity.InstanceID.IsUnknown() { + return "", "", "", "", fmt.Errorf("instance_id not found in config") + } + instanceId = identity.InstanceID.ValueString() + } + return projectId, region, instanceId, databaseName, nil +} diff --git a/stackit/internal/services/sqlserverflexalpha/sqlserverflex_acc_test.go b/stackit/internal/services/sqlserverflexalpha/sqlserverflex_acc_test.go index a9a270f1..56001b87 100644 --- a/stackit/internal/services/sqlserverflexalpha/sqlserverflex_acc_test.go +++ b/stackit/internal/services/sqlserverflexalpha/sqlserverflex_acc_test.go @@ -30,20 +30,35 @@ var ( ) var testConfigVarsMin = config.Variables{ - "project_id": config.StringVariable(testutil.ProjectId), - "name": config.StringVariable(fmt.Sprintf("tf-acc-%s", acctest.RandStringFromCharSet(7, acctest.CharSetAlphaNum))), + "project_id": config.StringVariable(testutil.ProjectId), + "name": config.StringVariable( + fmt.Sprintf( + "tf-acc-%s", + acctest.RandStringFromCharSet(7, acctest.CharSetAlphaNum), + ), + ), "flavor_cpu": config.IntegerVariable(4), "flavor_ram": config.IntegerVariable(16), "flavor_description": config.StringVariable("SQLServer-Flex-4.16-Standard-EU01"), "replicas": config.IntegerVariable(1), "flavor_id": config.StringVariable("4.16-Single"), - "username": config.StringVariable(fmt.Sprintf("tf-acc-user-%s", acctest.RandStringFromCharSet(7, acctest.CharSetAlpha))), - "role": config.StringVariable("##STACKIT_LoginManager##"), + "username": config.StringVariable( + fmt.Sprintf( + "tf-acc-user-%s", + acctest.RandStringFromCharSet(7, acctest.CharSetAlpha), + ), + ), + "role": config.StringVariable("##STACKIT_LoginManager##"), } var testConfigVarsMax = config.Variables{ - "project_id": config.StringVariable(testutil.ProjectId), - "name": config.StringVariable(fmt.Sprintf("tf-acc-%s", acctest.RandStringFromCharSet(7, acctest.CharSetAlphaNum))), + "project_id": config.StringVariable(testutil.ProjectId), + "name": config.StringVariable( + fmt.Sprintf( + "tf-acc-%s", + acctest.RandStringFromCharSet(7, acctest.CharSetAlphaNum), + ), + ), "acl1": config.StringVariable("192.168.0.0/16"), "flavor_cpu": config.IntegerVariable(4), "flavor_ram": config.IntegerVariable(16), @@ -55,9 +70,14 @@ var testConfigVarsMax = config.Variables{ "options_retention_days": config.IntegerVariable(64), "flavor_id": config.StringVariable("4.16-Single"), "backup_schedule": config.StringVariable("00 6 * * *"), - "username": config.StringVariable(fmt.Sprintf("tf-acc-user-%s", acctest.RandStringFromCharSet(7, acctest.CharSetAlpha))), - "role": config.StringVariable("##STACKIT_LoginManager##"), - "region": config.StringVariable(testutil.Region), + "username": config.StringVariable( + fmt.Sprintf( + "tf-acc-user-%s", + acctest.RandStringFromCharSet(7, acctest.CharSetAlpha), + ), + ), + "role": config.StringVariable("##STACKIT_LoginManager##"), + "region": config.StringVariable(testutil.Region), } func configVarsMinUpdated() config.Variables { @@ -73,364 +93,674 @@ func configVarsMaxUpdated() config.Variables { } func TestAccSQLServerFlexMinResource(t *testing.T) { - resource.Test(t, resource.TestCase{ - ProtoV6ProviderFactories: testutil.TestAccProtoV6ProviderFactories, - CheckDestroy: testAccChecksqlserverflexDestroy, - Steps: []resource.TestStep{ - // Creation - { - Config: testutil.SQLServerFlexProviderConfig("") + "\n" + resourceMinConfig, - ConfigVariables: testConfigVarsMin, - Check: resource.ComposeAggregateTestCheckFunc( - // Instance - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "project_id", testutil.ConvertConfigVariable(testConfigVarsMin["project_id"])), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "instance_id"), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "name", testutil.ConvertConfigVariable(testConfigVarsMin["name"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "acl.#", "1"), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "flavor.id"), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.description", testutil.ConvertConfigVariable(testConfigVarsMin["flavor_description"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "replicas", testutil.ConvertConfigVariable(testConfigVarsMin["replicas"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.cpu", testutil.ConvertConfigVariable(testConfigVarsMin["flavor_cpu"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.ram", testutil.ConvertConfigVariable(testConfigVarsMin["flavor_ram"])), - // User - resource.TestCheckResourceAttrPair( - "stackit_sqlserverflex_user.user", "project_id", - "stackit_sqlserverflex_instance.instance", "project_id", + resource.Test( + t, resource.TestCase{ + ProtoV6ProviderFactories: testutil.TestAccProtoV6ProviderFactories, + CheckDestroy: testAccChecksqlserverflexDestroy, + Steps: []resource.TestStep{ + // Creation + { + Config: testutil.SQLServerFlexProviderConfig("") + "\n" + resourceMinConfig, + ConfigVariables: testConfigVarsMin, + Check: resource.ComposeAggregateTestCheckFunc( + // Instance + resource.TestCheckResourceAttr( + "stackit_sqlserverflex_instance.instance", + "project_id", + testutil.ConvertConfigVariable(testConfigVarsMin["project_id"]), + ), + resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "instance_id"), + resource.TestCheckResourceAttr( + "stackit_sqlserverflex_instance.instance", + "name", + testutil.ConvertConfigVariable(testConfigVarsMin["name"]), + ), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "acl.#", "1"), + resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "flavor.id"), + resource.TestCheckResourceAttr( + "stackit_sqlserverflex_instance.instance", + "flavor.description", + testutil.ConvertConfigVariable(testConfigVarsMin["flavor_description"]), + ), + resource.TestCheckResourceAttr( + "stackit_sqlserverflex_instance.instance", + "replicas", + testutil.ConvertConfigVariable(testConfigVarsMin["replicas"]), + ), + resource.TestCheckResourceAttr( + "stackit_sqlserverflex_instance.instance", + "flavor.cpu", + testutil.ConvertConfigVariable(testConfigVarsMin["flavor_cpu"]), + ), + resource.TestCheckResourceAttr( + "stackit_sqlserverflex_instance.instance", + "flavor.ram", + testutil.ConvertConfigVariable(testConfigVarsMin["flavor_ram"]), + ), + // User + resource.TestCheckResourceAttrPair( + "stackit_sqlserverflex_user.user", "project_id", + "stackit_sqlserverflex_instance.instance", "project_id", + ), + resource.TestCheckResourceAttrPair( + "stackit_sqlserverflex_user.user", "instance_id", + "stackit_sqlserverflex_instance.instance", "instance_id", + ), + resource.TestCheckResourceAttrSet("stackit_sqlserverflex_user.user", "user_id"), + resource.TestCheckResourceAttrSet("stackit_sqlserverflex_user.user", "password"), ), - resource.TestCheckResourceAttrPair( - "stackit_sqlserverflex_user.user", "instance_id", - "stackit_sqlserverflex_instance.instance", "instance_id", - ), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_user.user", "user_id"), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_user.user", "password"), - ), - }, - // Update - { - Config: testutil.SQLServerFlexProviderConfig("") + "\n" + resourceMinConfig, - ConfigVariables: testConfigVarsMin, - Check: resource.ComposeAggregateTestCheckFunc( - // Instance - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "project_id", testutil.ConvertConfigVariable(testConfigVarsMin["project_id"])), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "instance_id"), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "name", testutil.ConvertConfigVariable(testConfigVarsMin["name"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "acl.#", "1"), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "flavor.id"), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.description", testutil.ConvertConfigVariable(testConfigVarsMin["flavor_description"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.cpu", testutil.ConvertConfigVariable(testConfigVarsMin["flavor_cpu"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.ram", testutil.ConvertConfigVariable(testConfigVarsMin["flavor_ram"])), - // User - resource.TestCheckResourceAttrPair( - "stackit_sqlserverflex_user.user", "project_id", - "stackit_sqlserverflex_instance.instance", "project_id", - ), - resource.TestCheckResourceAttrPair( - "stackit_sqlserverflex_user.user", "instance_id", - "stackit_sqlserverflex_instance.instance", "instance_id", - ), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_user.user", "user_id"), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_user.user", "password"), - ), - }, - // data source - { - Config: testutil.SQLServerFlexProviderConfig("") + "\n" + resourceMinConfig, - ConfigVariables: testConfigVarsMin, - Check: resource.ComposeAggregateTestCheckFunc( - // Instance data - resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "project_id", testutil.ConvertConfigVariable(testConfigVarsMin["project_id"])), - resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "name", testutil.ConvertConfigVariable(testConfigVarsMin["name"])), - resource.TestCheckResourceAttrPair( - "data.stackit_sqlserverflex_instance.instance", "project_id", - "stackit_sqlserverflex_instance.instance", "project_id", - ), - resource.TestCheckResourceAttrPair( - "data.stackit_sqlserverflex_instance.instance", "instance_id", - "stackit_sqlserverflex_instance.instance", "instance_id", - ), - resource.TestCheckResourceAttrPair( - "data.stackit_sqlserverflex_user.user", "instance_id", - "stackit_sqlserverflex_user.user", "instance_id", - ), - - resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "acl.#", "1"), - resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "flavor.id", testutil.ConvertConfigVariable(testConfigVarsMin["flavor_id"])), - resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "flavor.description", testutil.ConvertConfigVariable(testConfigVarsMin["flavor_description"])), - resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "flavor.cpu", testutil.ConvertConfigVariable(testConfigVarsMin["flavor_cpu"])), - resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "flavor.ram", testutil.ConvertConfigVariable(testConfigVarsMin["flavor_ram"])), - - // User data - resource.TestCheckResourceAttr("data.stackit_sqlserverflex_user.user", "project_id", testutil.ConvertConfigVariable(testConfigVarsMin["project_id"])), - resource.TestCheckResourceAttrSet("data.stackit_sqlserverflex_user.user", "user_id"), - resource.TestCheckResourceAttr("data.stackit_sqlserverflex_user.user", "username", testutil.ConvertConfigVariable(testConfigVarsMin["username"])), - resource.TestCheckResourceAttr("data.stackit_sqlserverflex_user.user", "roles.#", "1"), - resource.TestCheckResourceAttr("data.stackit_sqlserverflex_user.user", "roles.0", testutil.ConvertConfigVariable(testConfigVarsMax["role"])), - ), - }, - // Import - { - ConfigVariables: testConfigVarsMin, - ResourceName: "stackit_sqlserverflex_instance.instance", - ImportStateIdFunc: func(s *terraform.State) (string, error) { - r, ok := s.RootModule().Resources["stackit_sqlserverflex_instance.instance"] - if !ok { - return "", fmt.Errorf("couldn't find resource stackit_sqlserverflex_instance.instance") - } - instanceId, ok := r.Primary.Attributes["instance_id"] - if !ok { - return "", fmt.Errorf("couldn't find attribute instance_id") - } - - return fmt.Sprintf("%s,%s,%s", testutil.ProjectId, testutil.Region, instanceId), nil }, - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"backup_schedule"}, - ImportStateCheck: func(s []*terraform.InstanceState) error { - if len(s) != 1 { - return fmt.Errorf("expected 1 state, got %d", len(s)) - } - return nil + // Update + { + Config: testutil.SQLServerFlexProviderConfig("") + "\n" + resourceMinConfig, + ConfigVariables: testConfigVarsMin, + Check: resource.ComposeAggregateTestCheckFunc( + // Instance + resource.TestCheckResourceAttr( + "stackit_sqlserverflex_instance.instance", + "project_id", + testutil.ConvertConfigVariable(testConfigVarsMin["project_id"]), + ), + resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "instance_id"), + resource.TestCheckResourceAttr( + "stackit_sqlserverflex_instance.instance", + "name", + testutil.ConvertConfigVariable(testConfigVarsMin["name"]), + ), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "acl.#", "1"), + resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "flavor.id"), + resource.TestCheckResourceAttr( + "stackit_sqlserverflex_instance.instance", + "flavor.description", + testutil.ConvertConfigVariable(testConfigVarsMin["flavor_description"]), + ), + resource.TestCheckResourceAttr( + "stackit_sqlserverflex_instance.instance", + "flavor.cpu", + testutil.ConvertConfigVariable(testConfigVarsMin["flavor_cpu"]), + ), + resource.TestCheckResourceAttr( + "stackit_sqlserverflex_instance.instance", + "flavor.ram", + testutil.ConvertConfigVariable(testConfigVarsMin["flavor_ram"]), + ), + // User + resource.TestCheckResourceAttrPair( + "stackit_sqlserverflex_user.user", "project_id", + "stackit_sqlserverflex_instance.instance", "project_id", + ), + resource.TestCheckResourceAttrPair( + "stackit_sqlserverflex_user.user", "instance_id", + "stackit_sqlserverflex_instance.instance", "instance_id", + ), + resource.TestCheckResourceAttrSet("stackit_sqlserverflex_user.user", "user_id"), + resource.TestCheckResourceAttrSet("stackit_sqlserverflex_user.user", "password"), + ), }, - }, - { - ResourceName: "stackit_sqlserverflex_user.user", - ConfigVariables: testConfigVarsMin, - ImportStateIdFunc: func(s *terraform.State) (string, error) { - r, ok := s.RootModule().Resources["stackit_sqlserverflex_user.user"] - if !ok { - return "", fmt.Errorf("couldn't find resource stackit_sqlserverflex_user.user") - } - instanceId, ok := r.Primary.Attributes["instance_id"] - if !ok { - return "", fmt.Errorf("couldn't find attribute instance_id") - } - userId, ok := r.Primary.Attributes["user_id"] - if !ok { - return "", fmt.Errorf("couldn't find attribute user_id") - } + // data source + { + Config: testutil.SQLServerFlexProviderConfig("") + "\n" + resourceMinConfig, + ConfigVariables: testConfigVarsMin, + Check: resource.ComposeAggregateTestCheckFunc( + // Instance data + resource.TestCheckResourceAttr( + "data.stackit_sqlserverflex_instance.instance", + "project_id", + testutil.ConvertConfigVariable(testConfigVarsMin["project_id"]), + ), + resource.TestCheckResourceAttr( + "data.stackit_sqlserverflex_instance.instance", + "name", + testutil.ConvertConfigVariable(testConfigVarsMin["name"]), + ), + resource.TestCheckResourceAttrPair( + "data.stackit_sqlserverflex_instance.instance", "project_id", + "stackit_sqlserverflex_instance.instance", "project_id", + ), + resource.TestCheckResourceAttrPair( + "data.stackit_sqlserverflex_instance.instance", "instance_id", + "stackit_sqlserverflex_instance.instance", "instance_id", + ), + resource.TestCheckResourceAttrPair( + "data.stackit_sqlserverflex_user.user", "instance_id", + "stackit_sqlserverflex_user.user", "instance_id", + ), - return fmt.Sprintf("%s,%s,%s,%s", testutil.ProjectId, testutil.Region, instanceId, userId), nil + resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "acl.#", "1"), + resource.TestCheckResourceAttr( + "data.stackit_sqlserverflex_instance.instance", + "flavor.id", + testutil.ConvertConfigVariable(testConfigVarsMin["flavor_id"]), + ), + resource.TestCheckResourceAttr( + "data.stackit_sqlserverflex_instance.instance", + "flavor.description", + testutil.ConvertConfigVariable(testConfigVarsMin["flavor_description"]), + ), + resource.TestCheckResourceAttr( + "data.stackit_sqlserverflex_instance.instance", + "flavor.cpu", + testutil.ConvertConfigVariable(testConfigVarsMin["flavor_cpu"]), + ), + resource.TestCheckResourceAttr( + "data.stackit_sqlserverflex_instance.instance", + "flavor.ram", + testutil.ConvertConfigVariable(testConfigVarsMin["flavor_ram"]), + ), + + // User data + resource.TestCheckResourceAttr( + "data.stackit_sqlserverflex_user.user", + "project_id", + testutil.ConvertConfigVariable(testConfigVarsMin["project_id"]), + ), + resource.TestCheckResourceAttrSet("data.stackit_sqlserverflex_user.user", "user_id"), + resource.TestCheckResourceAttr( + "data.stackit_sqlserverflex_user.user", + "username", + testutil.ConvertConfigVariable(testConfigVarsMin["username"]), + ), + resource.TestCheckResourceAttr("data.stackit_sqlserverflex_user.user", "roles.#", "1"), + resource.TestCheckResourceAttr( + "data.stackit_sqlserverflex_user.user", + "roles.0", + testutil.ConvertConfigVariable(testConfigVarsMax["role"]), + ), + ), }, - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"password"}, + // Import + { + ConfigVariables: testConfigVarsMin, + ResourceName: "stackit_sqlserverflex_instance.instance", + ImportStateIdFunc: func(s *terraform.State) (string, error) { + r, ok := s.RootModule().Resources["stackit_sqlserverflex_instance.instance"] + if !ok { + return "", fmt.Errorf("couldn't find resource stackit_sqlserverflex_instance.instance") + } + instanceId, ok := r.Primary.Attributes["instance_id"] + if !ok { + return "", fmt.Errorf("couldn't find attribute instance_id") + } + + return fmt.Sprintf("%s,%s,%s", testutil.ProjectId, testutil.Region, instanceId), nil + }, + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"backup_schedule"}, + ImportStateCheck: func(s []*terraform.InstanceState) error { + if len(s) != 1 { + return fmt.Errorf("expected 1 state, got %d", len(s)) + } + return nil + }, + }, + { + ResourceName: "stackit_sqlserverflex_user.user", + ConfigVariables: testConfigVarsMin, + ImportStateIdFunc: func(s *terraform.State) (string, error) { + r, ok := s.RootModule().Resources["stackit_sqlserverflex_user.user"] + if !ok { + return "", fmt.Errorf("couldn't find resource stackit_sqlserverflex_user.user") + } + instanceId, ok := r.Primary.Attributes["instance_id"] + if !ok { + return "", fmt.Errorf("couldn't find attribute instance_id") + } + userId, ok := r.Primary.Attributes["user_id"] + if !ok { + return "", fmt.Errorf("couldn't find attribute user_id") + } + + return fmt.Sprintf("%s,%s,%s,%s", testutil.ProjectId, testutil.Region, instanceId, userId), nil + }, + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"password"}, + }, + // Update + { + Config: testutil.SQLServerFlexProviderConfig("") + "\n" + resourceMinConfig, + ConfigVariables: configVarsMinUpdated(), + Check: resource.ComposeAggregateTestCheckFunc( + // Instance data + resource.TestCheckResourceAttr( + "stackit_sqlserverflex_instance.instance", + "project_id", + testutil.ConvertConfigVariable(configVarsMinUpdated()["project_id"]), + ), + resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "instance_id"), + resource.TestCheckResourceAttr( + "stackit_sqlserverflex_instance.instance", + "name", + testutil.ConvertConfigVariable(configVarsMinUpdated()["name"]), + ), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "acl.#", "1"), + resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "flavor.id"), + resource.TestCheckResourceAttrSet( + "stackit_sqlserverflex_instance.instance", + "flavor.description", + ), + resource.TestCheckResourceAttr( + "stackit_sqlserverflex_instance.instance", + "flavor.cpu", + testutil.ConvertConfigVariable(configVarsMinUpdated()["flavor_cpu"]), + ), + resource.TestCheckResourceAttr( + "stackit_sqlserverflex_instance.instance", + "flavor.ram", + testutil.ConvertConfigVariable(configVarsMinUpdated()["flavor_ram"]), + ), + ), + }, + // Deletion is done by the framework implicitly }, - // Update - { - Config: testutil.SQLServerFlexProviderConfig("") + "\n" + resourceMinConfig, - ConfigVariables: configVarsMinUpdated(), - Check: resource.ComposeAggregateTestCheckFunc( - // Instance data - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "project_id", testutil.ConvertConfigVariable(configVarsMinUpdated()["project_id"])), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "instance_id"), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "name", testutil.ConvertConfigVariable(configVarsMinUpdated()["name"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "acl.#", "1"), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "flavor.id"), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "flavor.description"), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.cpu", testutil.ConvertConfigVariable(configVarsMinUpdated()["flavor_cpu"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.ram", testutil.ConvertConfigVariable(configVarsMinUpdated()["flavor_ram"])), - ), - }, - // Deletion is done by the framework implicitly }, - }) + ) } func TestAccSQLServerFlexMaxResource(t *testing.T) { - resource.Test(t, resource.TestCase{ - ProtoV6ProviderFactories: testutil.TestAccProtoV6ProviderFactories, - CheckDestroy: testAccChecksqlserverflexDestroy, - Steps: []resource.TestStep{ - // Creation - { - Config: testutil.SQLServerFlexProviderConfig("") + "\n" + resourceMaxConfig, - ConfigVariables: testConfigVarsMax, - Check: resource.ComposeAggregateTestCheckFunc( - // Instance - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "project_id", testutil.ConvertConfigVariable(testConfigVarsMax["project_id"])), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "instance_id"), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "name", testutil.ConvertConfigVariable(testConfigVarsMax["name"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "acl.#", "1"), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "acl.0", testutil.ConvertConfigVariable(testConfigVarsMax["acl1"])), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "flavor.id"), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.description", testutil.ConvertConfigVariable(testConfigVarsMax["flavor_description"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "replicas", testutil.ConvertConfigVariable(testConfigVarsMax["replicas"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.cpu", testutil.ConvertConfigVariable(testConfigVarsMax["flavor_cpu"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.ram", testutil.ConvertConfigVariable(testConfigVarsMax["flavor_ram"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "storage.class", testutil.ConvertConfigVariable(testConfigVarsMax["storage_class"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "storage.size", testutil.ConvertConfigVariable(testConfigVarsMax["storage_size"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "version", testutil.ConvertConfigVariable(testConfigVarsMax["server_version"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "options.retention_days", testutil.ConvertConfigVariable(testConfigVarsMax["options_retention_days"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "backup_schedule", testutil.ConvertConfigVariable(testConfigVarsMax["backup_schedule"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "region", testutil.Region), - // User - resource.TestCheckResourceAttrPair( - "stackit_sqlserverflex_user.user", "project_id", - "stackit_sqlserverflex_instance.instance", "project_id", + resource.Test( + t, resource.TestCase{ + ProtoV6ProviderFactories: testutil.TestAccProtoV6ProviderFactories, + CheckDestroy: testAccChecksqlserverflexDestroy, + Steps: []resource.TestStep{ + // Creation + { + Config: testutil.SQLServerFlexProviderConfig("") + "\n" + resourceMaxConfig, + ConfigVariables: testConfigVarsMax, + Check: resource.ComposeAggregateTestCheckFunc( + // Instance + resource.TestCheckResourceAttr( + "stackit_sqlserverflex_instance.instance", + "project_id", + testutil.ConvertConfigVariable(testConfigVarsMax["project_id"]), + ), + resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "instance_id"), + resource.TestCheckResourceAttr( + "stackit_sqlserverflex_instance.instance", + "name", + testutil.ConvertConfigVariable(testConfigVarsMax["name"]), + ), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "acl.#", "1"), + resource.TestCheckResourceAttr( + "stackit_sqlserverflex_instance.instance", + "acl.0", + testutil.ConvertConfigVariable(testConfigVarsMax["acl1"]), + ), + resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "flavor.id"), + resource.TestCheckResourceAttr( + "stackit_sqlserverflex_instance.instance", + "flavor.description", + testutil.ConvertConfigVariable(testConfigVarsMax["flavor_description"]), + ), + resource.TestCheckResourceAttr( + "stackit_sqlserverflex_instance.instance", + "replicas", + testutil.ConvertConfigVariable(testConfigVarsMax["replicas"]), + ), + resource.TestCheckResourceAttr( + "stackit_sqlserverflex_instance.instance", + "flavor.cpu", + testutil.ConvertConfigVariable(testConfigVarsMax["flavor_cpu"]), + ), + resource.TestCheckResourceAttr( + "stackit_sqlserverflex_instance.instance", + "flavor.ram", + testutil.ConvertConfigVariable(testConfigVarsMax["flavor_ram"]), + ), + resource.TestCheckResourceAttr( + "stackit_sqlserverflex_instance.instance", + "storage.class", + testutil.ConvertConfigVariable(testConfigVarsMax["storage_class"]), + ), + resource.TestCheckResourceAttr( + "stackit_sqlserverflex_instance.instance", + "storage.size", + testutil.ConvertConfigVariable(testConfigVarsMax["storage_size"]), + ), + resource.TestCheckResourceAttr( + "stackit_sqlserverflex_instance.instance", + "version", + testutil.ConvertConfigVariable(testConfigVarsMax["server_version"]), + ), + resource.TestCheckResourceAttr( + "stackit_sqlserverflex_instance.instance", + "options.retention_days", + testutil.ConvertConfigVariable(testConfigVarsMax["options_retention_days"]), + ), + resource.TestCheckResourceAttr( + "stackit_sqlserverflex_instance.instance", + "backup_schedule", + testutil.ConvertConfigVariable(testConfigVarsMax["backup_schedule"]), + ), + resource.TestCheckResourceAttr( + "stackit_sqlserverflex_instance.instance", + "region", + testutil.Region, + ), + // User + resource.TestCheckResourceAttrPair( + "stackit_sqlserverflex_user.user", "project_id", + "stackit_sqlserverflex_instance.instance", "project_id", + ), + resource.TestCheckResourceAttrPair( + "stackit_sqlserverflex_user.user", "instance_id", + "stackit_sqlserverflex_instance.instance", "instance_id", + ), + resource.TestCheckResourceAttrSet("stackit_sqlserverflex_user.user", "user_id"), + resource.TestCheckResourceAttrSet("stackit_sqlserverflex_user.user", "password"), ), - resource.TestCheckResourceAttrPair( - "stackit_sqlserverflex_user.user", "instance_id", - "stackit_sqlserverflex_instance.instance", "instance_id", - ), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_user.user", "user_id"), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_user.user", "password"), - ), - }, - // Update - { - Config: testutil.SQLServerFlexProviderConfig("") + "\n" + resourceMaxConfig, - ConfigVariables: testConfigVarsMax, - Check: resource.ComposeAggregateTestCheckFunc( - // Instance - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "project_id", testutil.ConvertConfigVariable(testConfigVarsMax["project_id"])), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "instance_id"), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "name", testutil.ConvertConfigVariable(testConfigVarsMax["name"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "acl.#", "1"), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "acl.0", testutil.ConvertConfigVariable(testConfigVarsMax["acl1"])), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "flavor.id"), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.description", testutil.ConvertConfigVariable(testConfigVarsMax["flavor_description"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "replicas", testutil.ConvertConfigVariable(testConfigVarsMax["replicas"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.cpu", testutil.ConvertConfigVariable(testConfigVarsMax["flavor_cpu"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.ram", testutil.ConvertConfigVariable(testConfigVarsMax["flavor_ram"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "storage.class", testutil.ConvertConfigVariable(testConfigVarsMax["storage_class"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "storage.size", testutil.ConvertConfigVariable(testConfigVarsMax["storage_size"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "version", testutil.ConvertConfigVariable(testConfigVarsMax["server_version"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "options.retention_days", testutil.ConvertConfigVariable(testConfigVarsMax["options_retention_days"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "backup_schedule", testutil.ConvertConfigVariable(testConfigVarsMax["backup_schedule"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "region", testutil.Region), - // User - resource.TestCheckResourceAttrPair( - "stackit_sqlserverflex_user.user", "project_id", - "stackit_sqlserverflex_instance.instance", "project_id", - ), - resource.TestCheckResourceAttrPair( - "stackit_sqlserverflex_user.user", "instance_id", - "stackit_sqlserverflex_instance.instance", "instance_id", - ), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_user.user", "user_id"), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_user.user", "password"), - ), - }, - // data source - { - Config: testutil.SQLServerFlexProviderConfig("") + "\n" + resourceMaxConfig, - ConfigVariables: testConfigVarsMax, - Check: resource.ComposeAggregateTestCheckFunc( - // Instance data - resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "project_id", testutil.ConvertConfigVariable(testConfigVarsMax["project_id"])), - resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "name", testutil.ConvertConfigVariable(testConfigVarsMax["name"])), - resource.TestCheckResourceAttrPair( - "data.stackit_sqlserverflex_instance.instance", "project_id", - "stackit_sqlserverflex_instance.instance", "project_id", - ), - resource.TestCheckResourceAttrPair( - "data.stackit_sqlserverflex_instance.instance", "instance_id", - "stackit_sqlserverflex_instance.instance", "instance_id", - ), - resource.TestCheckResourceAttrPair( - "data.stackit_sqlserverflex_user.user", "instance_id", - "stackit_sqlserverflex_user.user", "instance_id", - ), - - resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "acl.#", "1"), - resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "acl.0", testutil.ConvertConfigVariable(testConfigVarsMax["acl1"])), - resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "flavor.id", testutil.ConvertConfigVariable(testConfigVarsMax["flavor_id"])), - resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "flavor.description", testutil.ConvertConfigVariable(testConfigVarsMax["flavor_description"])), - resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "flavor.cpu", testutil.ConvertConfigVariable(testConfigVarsMax["flavor_cpu"])), - resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "flavor.ram", testutil.ConvertConfigVariable(testConfigVarsMax["flavor_ram"])), - resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "replicas", testutil.ConvertConfigVariable(testConfigVarsMax["replicas"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "options.retention_days", testutil.ConvertConfigVariable(testConfigVarsMax["options_retention_days"])), - resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "backup_schedule", testutil.ConvertConfigVariable(testConfigVarsMax["backup_schedule"])), - - // User data - resource.TestCheckResourceAttr("data.stackit_sqlserverflex_user.user", "project_id", testutil.ConvertConfigVariable(testConfigVarsMax["project_id"])), - resource.TestCheckResourceAttrSet("data.stackit_sqlserverflex_user.user", "user_id"), - resource.TestCheckResourceAttr("data.stackit_sqlserverflex_user.user", "username", testutil.ConvertConfigVariable(testConfigVarsMax["username"])), - resource.TestCheckResourceAttr("data.stackit_sqlserverflex_user.user", "roles.#", "1"), - resource.TestCheckResourceAttr("data.stackit_sqlserverflex_user.user", "roles.0", testutil.ConvertConfigVariable(testConfigVarsMax["role"])), - resource.TestCheckResourceAttrSet("data.stackit_sqlserverflex_user.user", "host"), - resource.TestCheckResourceAttrSet("data.stackit_sqlserverflex_user.user", "port"), - ), - }, - // Import - { - ConfigVariables: testConfigVarsMax, - ResourceName: "stackit_sqlserverflex_instance.instance", - ImportStateIdFunc: func(s *terraform.State) (string, error) { - r, ok := s.RootModule().Resources["stackit_sqlserverflex_instance.instance"] - if !ok { - return "", fmt.Errorf("couldn't find resource stackit_sqlserverflex_instance.instance") - } - instanceId, ok := r.Primary.Attributes["instance_id"] - if !ok { - return "", fmt.Errorf("couldn't find attribute instance_id") - } - - return fmt.Sprintf("%s,%s,%s", testutil.ProjectId, testutil.Region, instanceId), nil }, - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"backup_schedule"}, - ImportStateCheck: func(s []*terraform.InstanceState) error { - if len(s) != 1 { - return fmt.Errorf("expected 1 state, got %d", len(s)) - } - if s[0].Attributes["backup_schedule"] != testutil.ConvertConfigVariable(testConfigVarsMax["backup_schedule"]) { - return fmt.Errorf("expected backup_schedule %s, got %s", testConfigVarsMax["backup_schedule"], s[0].Attributes["backup_schedule"]) - } - return nil + // Update + { + Config: testutil.SQLServerFlexProviderConfig("") + "\n" + resourceMaxConfig, + ConfigVariables: testConfigVarsMax, + Check: resource.ComposeAggregateTestCheckFunc( + // Instance + resource.TestCheckResourceAttr( + "stackit_sqlserverflex_instance.instance", + "project_id", + testutil.ConvertConfigVariable(testConfigVarsMax["project_id"]), + ), + resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "instance_id"), + resource.TestCheckResourceAttr( + "stackit_sqlserverflex_instance.instance", + "name", + testutil.ConvertConfigVariable(testConfigVarsMax["name"]), + ), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "acl.#", "1"), + resource.TestCheckResourceAttr( + "stackit_sqlserverflex_instance.instance", + "acl.0", + testutil.ConvertConfigVariable(testConfigVarsMax["acl1"]), + ), + resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "flavor.id"), + resource.TestCheckResourceAttr( + "stackit_sqlserverflex_instance.instance", + "flavor.description", + testutil.ConvertConfigVariable(testConfigVarsMax["flavor_description"]), + ), + resource.TestCheckResourceAttr( + "stackit_sqlserverflex_instance.instance", + "replicas", + testutil.ConvertConfigVariable(testConfigVarsMax["replicas"]), + ), + resource.TestCheckResourceAttr( + "stackit_sqlserverflex_instance.instance", + "flavor.cpu", + testutil.ConvertConfigVariable(testConfigVarsMax["flavor_cpu"]), + ), + resource.TestCheckResourceAttr( + "stackit_sqlserverflex_instance.instance", + "flavor.ram", + testutil.ConvertConfigVariable(testConfigVarsMax["flavor_ram"]), + ), + resource.TestCheckResourceAttr( + "stackit_sqlserverflex_instance.instance", + "storage.class", + testutil.ConvertConfigVariable(testConfigVarsMax["storage_class"]), + ), + resource.TestCheckResourceAttr( + "stackit_sqlserverflex_instance.instance", + "storage.size", + testutil.ConvertConfigVariable(testConfigVarsMax["storage_size"]), + ), + resource.TestCheckResourceAttr( + "stackit_sqlserverflex_instance.instance", + "version", + testutil.ConvertConfigVariable(testConfigVarsMax["server_version"]), + ), + resource.TestCheckResourceAttr( + "stackit_sqlserverflex_instance.instance", + "options.retention_days", + testutil.ConvertConfigVariable(testConfigVarsMax["options_retention_days"]), + ), + resource.TestCheckResourceAttr( + "stackit_sqlserverflex_instance.instance", + "backup_schedule", + testutil.ConvertConfigVariable(testConfigVarsMax["backup_schedule"]), + ), + resource.TestCheckResourceAttr( + "stackit_sqlserverflex_instance.instance", + "region", + testutil.Region, + ), + // User + resource.TestCheckResourceAttrPair( + "stackit_sqlserverflex_user.user", "project_id", + "stackit_sqlserverflex_instance.instance", "project_id", + ), + resource.TestCheckResourceAttrPair( + "stackit_sqlserverflex_user.user", "instance_id", + "stackit_sqlserverflex_instance.instance", "instance_id", + ), + resource.TestCheckResourceAttrSet("stackit_sqlserverflex_user.user", "user_id"), + resource.TestCheckResourceAttrSet("stackit_sqlserverflex_user.user", "password"), + ), }, - }, - { - ResourceName: "stackit_sqlserverflex_user.user", - ConfigVariables: testConfigVarsMax, - ImportStateIdFunc: func(s *terraform.State) (string, error) { - r, ok := s.RootModule().Resources["stackit_sqlserverflex_user.user"] - if !ok { - return "", fmt.Errorf("couldn't find resource stackit_sqlserverflex_user.user") - } - instanceId, ok := r.Primary.Attributes["instance_id"] - if !ok { - return "", fmt.Errorf("couldn't find attribute instance_id") - } - userId, ok := r.Primary.Attributes["user_id"] - if !ok { - return "", fmt.Errorf("couldn't find attribute user_id") - } + // data source + { + Config: testutil.SQLServerFlexProviderConfig("") + "\n" + resourceMaxConfig, + ConfigVariables: testConfigVarsMax, + Check: resource.ComposeAggregateTestCheckFunc( + // Instance data + resource.TestCheckResourceAttr( + "data.stackit_sqlserverflex_instance.instance", + "project_id", + testutil.ConvertConfigVariable(testConfigVarsMax["project_id"]), + ), + resource.TestCheckResourceAttr( + "data.stackit_sqlserverflex_instance.instance", + "name", + testutil.ConvertConfigVariable(testConfigVarsMax["name"]), + ), + resource.TestCheckResourceAttrPair( + "data.stackit_sqlserverflex_instance.instance", "project_id", + "stackit_sqlserverflex_instance.instance", "project_id", + ), + resource.TestCheckResourceAttrPair( + "data.stackit_sqlserverflex_instance.instance", "instance_id", + "stackit_sqlserverflex_instance.instance", "instance_id", + ), + resource.TestCheckResourceAttrPair( + "data.stackit_sqlserverflex_user.user", "instance_id", + "stackit_sqlserverflex_user.user", "instance_id", + ), - return fmt.Sprintf("%s,%s,%s,%s", testutil.ProjectId, testutil.Region, instanceId, userId), nil + resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "acl.#", "1"), + resource.TestCheckResourceAttr( + "data.stackit_sqlserverflex_instance.instance", + "acl.0", + testutil.ConvertConfigVariable(testConfigVarsMax["acl1"]), + ), + resource.TestCheckResourceAttr( + "data.stackit_sqlserverflex_instance.instance", + "flavor.id", + testutil.ConvertConfigVariable(testConfigVarsMax["flavor_id"]), + ), + resource.TestCheckResourceAttr( + "data.stackit_sqlserverflex_instance.instance", + "flavor.description", + testutil.ConvertConfigVariable(testConfigVarsMax["flavor_description"]), + ), + resource.TestCheckResourceAttr( + "data.stackit_sqlserverflex_instance.instance", + "flavor.cpu", + testutil.ConvertConfigVariable(testConfigVarsMax["flavor_cpu"]), + ), + resource.TestCheckResourceAttr( + "data.stackit_sqlserverflex_instance.instance", + "flavor.ram", + testutil.ConvertConfigVariable(testConfigVarsMax["flavor_ram"]), + ), + resource.TestCheckResourceAttr( + "data.stackit_sqlserverflex_instance.instance", + "replicas", + testutil.ConvertConfigVariable(testConfigVarsMax["replicas"]), + ), + resource.TestCheckResourceAttr( + "stackit_sqlserverflex_instance.instance", + "options.retention_days", + testutil.ConvertConfigVariable(testConfigVarsMax["options_retention_days"]), + ), + resource.TestCheckResourceAttr( + "data.stackit_sqlserverflex_instance.instance", + "backup_schedule", + testutil.ConvertConfigVariable(testConfigVarsMax["backup_schedule"]), + ), + + // User data + resource.TestCheckResourceAttr( + "data.stackit_sqlserverflex_user.user", + "project_id", + testutil.ConvertConfigVariable(testConfigVarsMax["project_id"]), + ), + resource.TestCheckResourceAttrSet("data.stackit_sqlserverflex_user.user", "user_id"), + resource.TestCheckResourceAttr( + "data.stackit_sqlserverflex_user.user", + "username", + testutil.ConvertConfigVariable(testConfigVarsMax["username"]), + ), + resource.TestCheckResourceAttr("data.stackit_sqlserverflex_user.user", "roles.#", "1"), + resource.TestCheckResourceAttr( + "data.stackit_sqlserverflex_user.user", + "roles.0", + testutil.ConvertConfigVariable(testConfigVarsMax["role"]), + ), + resource.TestCheckResourceAttrSet("data.stackit_sqlserverflex_user.user", "host"), + resource.TestCheckResourceAttrSet("data.stackit_sqlserverflex_user.user", "port"), + ), }, - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"password"}, + // Import + { + ConfigVariables: testConfigVarsMax, + ResourceName: "stackit_sqlserverflex_instance.instance", + ImportStateIdFunc: func(s *terraform.State) (string, error) { + r, ok := s.RootModule().Resources["stackit_sqlserverflex_instance.instance"] + if !ok { + return "", fmt.Errorf("couldn't find resource stackit_sqlserverflex_instance.instance") + } + instanceId, ok := r.Primary.Attributes["instance_id"] + if !ok { + return "", fmt.Errorf("couldn't find attribute instance_id") + } + + return fmt.Sprintf("%s,%s,%s", testutil.ProjectId, testutil.Region, instanceId), nil + }, + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"backup_schedule"}, + ImportStateCheck: func(s []*terraform.InstanceState) error { + if len(s) != 1 { + return fmt.Errorf("expected 1 state, got %d", len(s)) + } + if s[0].Attributes["backup_schedule"] != testutil.ConvertConfigVariable(testConfigVarsMax["backup_schedule"]) { + return fmt.Errorf( + "expected backup_schedule %s, got %s", + testConfigVarsMax["backup_schedule"], + s[0].Attributes["backup_schedule"], + ) + } + return nil + }, + }, + { + ResourceName: "stackit_sqlserverflex_user.user", + ConfigVariables: testConfigVarsMax, + ImportStateIdFunc: func(s *terraform.State) (string, error) { + r, ok := s.RootModule().Resources["stackit_sqlserverflex_user.user"] + if !ok { + return "", fmt.Errorf("couldn't find resource stackit_sqlserverflex_user.user") + } + instanceId, ok := r.Primary.Attributes["instance_id"] + if !ok { + return "", fmt.Errorf("couldn't find attribute instance_id") + } + userId, ok := r.Primary.Attributes["user_id"] + if !ok { + return "", fmt.Errorf("couldn't find attribute user_id") + } + + return fmt.Sprintf("%s,%s,%s,%s", testutil.ProjectId, testutil.Region, instanceId, userId), nil + }, + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"password"}, + }, + // Update + { + Config: testutil.SQLServerFlexProviderConfig("") + "\n" + resourceMaxConfig, + ConfigVariables: configVarsMaxUpdated(), + Check: resource.ComposeAggregateTestCheckFunc( + // Instance data + resource.TestCheckResourceAttr( + "stackit_sqlserverflex_instance.instance", + "project_id", + testutil.ConvertConfigVariable(configVarsMaxUpdated()["project_id"]), + ), + resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "instance_id"), + resource.TestCheckResourceAttr( + "stackit_sqlserverflex_instance.instance", + "name", + testutil.ConvertConfigVariable(configVarsMaxUpdated()["name"]), + ), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "acl.#", "1"), + resource.TestCheckResourceAttr( + "stackit_sqlserverflex_instance.instance", + "acl.0", + testutil.ConvertConfigVariable(configVarsMaxUpdated()["acl1"]), + ), + resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "flavor.id"), + resource.TestCheckResourceAttrSet( + "stackit_sqlserverflex_instance.instance", + "flavor.description", + ), + resource.TestCheckResourceAttr( + "stackit_sqlserverflex_instance.instance", + "flavor.cpu", + testutil.ConvertConfigVariable(configVarsMaxUpdated()["flavor_cpu"]), + ), + resource.TestCheckResourceAttr( + "stackit_sqlserverflex_instance.instance", + "flavor.ram", + testutil.ConvertConfigVariable(configVarsMaxUpdated()["flavor_ram"]), + ), + resource.TestCheckResourceAttr( + "stackit_sqlserverflex_instance.instance", + "replicas", + testutil.ConvertConfigVariable(configVarsMaxUpdated()["replicas"]), + ), + resource.TestCheckResourceAttr( + "stackit_sqlserverflex_instance.instance", + "storage.class", + testutil.ConvertConfigVariable(configVarsMaxUpdated()["storage_class"]), + ), + resource.TestCheckResourceAttr( + "stackit_sqlserverflex_instance.instance", + "storage.size", + testutil.ConvertConfigVariable(configVarsMaxUpdated()["storage_size"]), + ), + resource.TestCheckResourceAttr( + "stackit_sqlserverflex_instance.instance", + "version", + testutil.ConvertConfigVariable(configVarsMaxUpdated()["server_version"]), + ), + resource.TestCheckResourceAttr( + "stackit_sqlserverflex_instance.instance", + "options.retention_days", + testutil.ConvertConfigVariable(configVarsMaxUpdated()["options_retention_days"]), + ), + resource.TestCheckResourceAttr( + "stackit_sqlserverflex_instance.instance", + "backup_schedule", + testutil.ConvertConfigVariable(configVarsMaxUpdated()["backup_schedule"]), + ), + ), + }, + // Deletion is done by the framework implicitly }, - // Update - { - Config: testutil.SQLServerFlexProviderConfig("") + "\n" + resourceMaxConfig, - ConfigVariables: configVarsMaxUpdated(), - Check: resource.ComposeAggregateTestCheckFunc( - // Instance data - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "project_id", testutil.ConvertConfigVariable(configVarsMaxUpdated()["project_id"])), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "instance_id"), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "name", testutil.ConvertConfigVariable(configVarsMaxUpdated()["name"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "acl.#", "1"), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "acl.0", testutil.ConvertConfigVariable(configVarsMaxUpdated()["acl1"])), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "flavor.id"), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "flavor.description"), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.cpu", testutil.ConvertConfigVariable(configVarsMaxUpdated()["flavor_cpu"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.ram", testutil.ConvertConfigVariable(configVarsMaxUpdated()["flavor_ram"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "replicas", testutil.ConvertConfigVariable(configVarsMaxUpdated()["replicas"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "storage.class", testutil.ConvertConfigVariable(configVarsMaxUpdated()["storage_class"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "storage.size", testutil.ConvertConfigVariable(configVarsMaxUpdated()["storage_size"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "version", testutil.ConvertConfigVariable(configVarsMaxUpdated()["server_version"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "options.retention_days", testutil.ConvertConfigVariable(configVarsMaxUpdated()["options_retention_days"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "backup_schedule", testutil.ConvertConfigVariable(configVarsMaxUpdated()["backup_schedule"])), - ), - }, - // Deletion is done by the framework implicitly }, - }) + ) } func testAccChecksqlserverflexDestroy(s *terraform.State) error { @@ -473,9 +803,19 @@ func testAccChecksqlserverflexDestroy(s *terraform.State) error { if err != nil { return fmt.Errorf("destroying instance %s during CheckDestroy: %w", *items[i].Id, err) } - _, err = wait.DeleteInstanceWaitHandler(ctx, client, testutil.ProjectId, *items[i].Id, testutil.Region).WaitWithContext(ctx) + _, err = wait.DeleteInstanceWaitHandler( + ctx, + client, + testutil.ProjectId, + *items[i].Id, + testutil.Region, + ).WaitWithContext(ctx) if err != nil { - return fmt.Errorf("destroying instance %s during CheckDestroy: waiting for deletion %w", *items[i].Id, err) + return fmt.Errorf( + "destroying instance %s during CheckDestroy: waiting for deletion %w", + *items[i].Id, + err, + ) } } } diff --git a/stackit/internal/services/sqlserverflexalpha/user/datasource.go b/stackit/internal/services/sqlserverflexalpha/user/datasource.go index 282a713c..d76e27a4 100644 --- a/stackit/internal/services/sqlserverflexalpha/user/datasource.go +++ b/stackit/internal/services/sqlserverflexalpha/user/datasource.go @@ -4,23 +4,19 @@ import ( "context" "fmt" "net/http" - "strconv" "github.com/hashicorp/terraform-plugin-framework-validators/int64validator" - "github.com/hashicorp/terraform-plugin-framework/attr" + "github.com/hashicorp/terraform-plugin-framework/datasource" + "github.com/hashicorp/terraform-plugin-framework/datasource/schema" + "github.com/hashicorp/terraform-plugin-framework/schema/validator" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/hashicorp/terraform-plugin-log/tflog" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" - sqlserverflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/utils" - - "github.com/hashicorp/terraform-plugin-framework/datasource" - "github.com/hashicorp/terraform-plugin-framework/schema/validator" - "github.com/hashicorp/terraform-plugin-log/tflog" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" + sqlserverflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/utils" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/validate" - - "github.com/hashicorp/terraform-plugin-framework/datasource/schema" - "github.com/hashicorp/terraform-plugin-framework/types" ) // Ensure the implementation satisfies the expected interfaces. @@ -34,6 +30,7 @@ func NewUserDataSource() datasource.DataSource { } type dataSourceModel struct { + //TODO: check generated data source for the correct types and pointers Id types.String `tfsdk:"id"` // needed by TF UserId types.Int64 `tfsdk:"user_id"` InstanceId types.String `tfsdk:"instance_id"` @@ -79,7 +76,7 @@ func (r *userDataSource) Configure( return } r.client = apiClient - tflog.Info(ctx, "SQLServer Flex user client configured") + tflog.Info(ctx, "SQLServer Flex beta user client configured") } // Schema defines the schema for the data source. @@ -223,50 +220,5 @@ func (r *userDataSource) Read( if resp.Diagnostics.HasError() { return } - tflog.Info(ctx, "SQLServer Flex instance read") -} - -func mapDataSourceFields(userResp *sqlserverflexalpha.GetUserResponse, model *dataSourceModel, region string) error { - if userResp == nil { - return fmt.Errorf("response is nil") - } - if model == nil { - return fmt.Errorf("model input is nil") - } - user := userResp - - var userId int64 - if model.UserId.ValueInt64() != 0 { - userId = model.UserId.ValueInt64() - } else if user.Id != nil { - userId = *user.Id - } else { - return fmt.Errorf("user id not present") - } - model.Id = utils.BuildInternalTerraformId( - model.ProjectId.ValueString(), region, model.InstanceId.ValueString(), strconv.FormatInt(userId, 10), - ) - model.UserId = types.Int64Value(userId) - model.Username = types.StringPointerValue(user.Username) - - if user.Roles == nil { - model.Roles = types.SetNull(types.StringType) - } else { - var roles []attr.Value - for _, role := range *user.Roles { - roles = append(roles, types.StringValue(string(role))) - } - rolesSet, diags := types.SetValue(types.StringType, roles) - if diags.HasError() { - return fmt.Errorf("failed to map roles: %w", core.DiagsToError(diags)) - } - model.Roles = rolesSet - } - model.Host = types.StringPointerValue(user.Host) - model.Port = types.Int64PointerValue(user.Port) - model.Region = types.StringValue(region) - model.Status = types.StringPointerValue(user.Status) - model.DefaultDatabase = types.StringPointerValue(user.DefaultDatabase) - - return nil + tflog.Info(ctx, "SQLServer Flex alpha instance read") } diff --git a/stackit/internal/services/sqlserverflexalpha/user/datasource_test.go b/stackit/internal/services/sqlserverflexalpha/user/datasource_test.go deleted file mode 100644 index bd1fa093..00000000 --- a/stackit/internal/services/sqlserverflexalpha/user/datasource_test.go +++ /dev/null @@ -1,147 +0,0 @@ -package sqlserverflexalpha - -import ( - "testing" - - "github.com/google/go-cmp/cmp" - "github.com/hashicorp/terraform-plugin-framework/attr" - "github.com/hashicorp/terraform-plugin-framework/types" - "github.com/stackitcloud/stackit-sdk-go/core/utils" - "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha" -) - -func TestMapDataSourceFields(t *testing.T) { - const testRegion = "region" - tests := []struct { - description string - input *sqlserverflexalpha.GetUserResponse - region string - expected dataSourceModel - isValid bool - }{ - { - "default_values", - &sqlserverflexalpha.GetUserResponse{}, - testRegion, - dataSourceModel{ - Id: types.StringValue("pid,region,iid,1"), - UserId: types.Int64Value(1), - InstanceId: types.StringValue("iid"), - ProjectId: types.StringValue("pid"), - Username: types.StringNull(), - Roles: types.SetNull(types.StringType), - Host: types.StringNull(), - Port: types.Int64Null(), - Region: types.StringValue(testRegion), - Status: types.StringNull(), - DefaultDatabase: types.StringNull(), - }, - true, - }, - { - "simple_values", - &sqlserverflexalpha.GetUserResponse{ - - Roles: &[]sqlserverflexalpha.UserRole{ - "role_1", - "role_2", - "", - }, - Username: utils.Ptr("username"), - Host: utils.Ptr("host"), - Port: utils.Ptr(int64(1234)), - Status: utils.Ptr("active"), - DefaultDatabase: utils.Ptr("default_db"), - }, - testRegion, - dataSourceModel{ - Id: types.StringValue("pid,region,iid,1"), - UserId: types.Int64Value(1), - InstanceId: types.StringValue("iid"), - ProjectId: types.StringValue("pid"), - Username: types.StringValue("username"), - Roles: types.SetValueMust( - types.StringType, []attr.Value{ - types.StringValue("role_1"), - types.StringValue("role_2"), - types.StringValue(""), - }, - ), - Host: types.StringValue("host"), - Port: types.Int64Value(1234), - Region: types.StringValue(testRegion), - Status: types.StringValue("active"), - DefaultDatabase: types.StringValue("default_db"), - }, - true, - }, - { - "null_fields_and_int_conversions", - &sqlserverflexalpha.GetUserResponse{ - Id: utils.Ptr(int64(1)), - Roles: &[]sqlserverflexalpha.UserRole{}, - Username: nil, - Host: nil, - Port: utils.Ptr(int64(2123456789)), - }, - testRegion, - dataSourceModel{ - Id: types.StringValue("pid,region,iid,1"), - UserId: types.Int64Value(1), - InstanceId: types.StringValue("iid"), - ProjectId: types.StringValue("pid"), - Username: types.StringNull(), - Roles: types.SetValueMust(types.StringType, []attr.Value{}), - Host: types.StringNull(), - Port: types.Int64Value(2123456789), - Region: types.StringValue(testRegion), - }, - true, - }, - { - "nil_response", - nil, - testRegion, - dataSourceModel{}, - false, - }, - { - "nil_response_2", - &sqlserverflexalpha.GetUserResponse{}, - testRegion, - dataSourceModel{}, - false, - }, - { - "no_resource_id", - &sqlserverflexalpha.GetUserResponse{}, - testRegion, - dataSourceModel{}, - false, - }, - } - for _, tt := range tests { - t.Run( - tt.description, func(t *testing.T) { - state := &dataSourceModel{ - ProjectId: tt.expected.ProjectId, - InstanceId: tt.expected.InstanceId, - UserId: tt.expected.UserId, - } - err := mapDataSourceFields(tt.input, state, tt.region) - if !tt.isValid && err == nil { - t.Fatalf("Should have failed") - } - if tt.isValid && err != nil { - t.Fatalf("Should not have failed: %v", err) - } - if tt.isValid { - diff := cmp.Diff(state, &tt.expected) - if diff != "" { - t.Fatalf("Data does not match: %s", diff) - } - } - }, - ) - } -} diff --git a/stackit/internal/services/sqlserverflexalpha/user/mapper.go b/stackit/internal/services/sqlserverflexalpha/user/mapper.go new file mode 100644 index 00000000..2035029b --- /dev/null +++ b/stackit/internal/services/sqlserverflexalpha/user/mapper.go @@ -0,0 +1,179 @@ +package sqlserverflexalpha + +import ( + "fmt" + "strconv" + + "github.com/hashicorp/terraform-plugin-framework/attr" + "github.com/hashicorp/terraform-plugin-framework/types" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" +) + +// mapDataSourceFields maps the API response to a dataSourceModel. +func mapDataSourceFields(userResp *sqlserverflexalpha.GetUserResponse, model *dataSourceModel, region string) error { + if userResp == nil { + return fmt.Errorf("response is nil") + } + if model == nil { + return fmt.Errorf("model input is nil") + } + user := userResp + + // Handle user ID + var userId int64 + if model.UserId.ValueInt64() != 0 { + userId = model.UserId.ValueInt64() + } else if user.Id != nil { + userId = *user.Id + } else { + return fmt.Errorf("user id not present") + } + + // Set main attributes + model.Id = utils.BuildInternalTerraformId( + model.ProjectId.ValueString(), region, model.InstanceId.ValueString(), strconv.FormatInt(userId, 10), + ) + model.UserId = types.Int64Value(userId) + model.Username = types.StringPointerValue(user.Username) + + // Map roles + if user.Roles == nil { + model.Roles = types.SetNull(types.StringType) + } else { + var roles []attr.Value + for _, role := range *user.Roles { + roles = append(roles, types.StringValue(string(role))) + } + rolesSet, diags := types.SetValue(types.StringType, roles) + if diags.HasError() { + return fmt.Errorf("failed to map roles: %w", core.DiagsToError(diags)) + } + model.Roles = rolesSet + } + + // Set remaining attributes + model.Host = types.StringPointerValue(user.Host) + model.Port = types.Int64PointerValue(user.Port) + model.Region = types.StringValue(region) + model.Status = types.StringPointerValue(user.Status) + model.DefaultDatabase = types.StringPointerValue(user.DefaultDatabase) + + return nil +} + +// mapFields maps the API response to a resourceModel. +func mapFields(userResp *sqlserverflexalpha.GetUserResponse, model *resourceModel, region string) error { + if userResp == nil { + return fmt.Errorf("response is nil") + } + if model == nil { + return fmt.Errorf("model input is nil") + } + user := userResp + + // Handle user ID + var userId int64 + if model.UserId.ValueInt64() != 0 { + userId = model.UserId.ValueInt64() + } else if user.Id != nil { + userId = *user.Id + } else { + return fmt.Errorf("user id not present") + } + + // Set main attributes + model.Id = types.Int64Value(userId) + model.UserId = types.Int64Value(userId) + model.Username = types.StringPointerValue(user.Username) + + // Map roles + if user.Roles != nil { + var roles []attr.Value + for _, role := range *user.Roles { + roles = append(roles, types.StringValue(string(role))) + } + rolesSet, diags := types.SetValue(types.StringType, roles) + if diags.HasError() { + return fmt.Errorf("failed to map roles: %w", core.DiagsToError(diags)) + } + model.Roles = types.List(rolesSet) + } + + // Ensure roles is not null + if model.Roles.IsNull() || model.Roles.IsUnknown() { + model.Roles = types.List(types.SetNull(types.StringType)) + } + + // Set connection details + model.Host = types.StringPointerValue(user.Host) + model.Port = types.Int64PointerValue(user.Port) + model.Region = types.StringValue(region) + return nil +} + +// mapFieldsCreate maps the API response from creating a user to a resourceModel. +func mapFieldsCreate(userResp *sqlserverflexalpha.CreateUserResponse, model *resourceModel, region string) error { + if userResp == nil { + return fmt.Errorf("response is nil") + } + if model == nil { + return fmt.Errorf("model input is nil") + } + user := userResp + + if user.Id == nil { + return fmt.Errorf("user id not present") + } + userId := *user.Id + model.Id = types.Int64Value(userId) + model.UserId = types.Int64Value(userId) + model.Username = types.StringPointerValue(user.Username) + + if user.Password == nil { + return fmt.Errorf("user password not present") + } + model.Password = types.StringValue(*user.Password) + + if user.Roles != nil { + var roles []attr.Value + for _, role := range *user.Roles { + roles = append(roles, types.StringValue(string(role))) + } + rolesSet, diags := types.SetValue(types.StringType, roles) + if diags.HasError() { + return fmt.Errorf("failed to map roles: %w", core.DiagsToError(diags)) + } + model.Roles = types.List(rolesSet) + } + + if model.Roles.IsNull() || model.Roles.IsUnknown() { + model.Roles = types.List(types.SetNull(types.StringType)) + } + + model.Host = types.StringPointerValue(user.Host) + model.Port = types.Int64PointerValue(user.Port) + model.Region = types.StringValue(region) + model.Status = types.StringPointerValue(user.Status) + model.DefaultDatabase = types.StringPointerValue(user.DefaultDatabase) + + return nil +} + +// toCreatePayload converts a resourceModel to an API CreateUserRequestPayload. +func toCreatePayload( + model *resourceModel, + roles []sqlserverflexalpha.UserRole, +) (*sqlserverflexalpha.CreateUserRequestPayload, error) { + if model == nil { + return nil, fmt.Errorf("nil model") + } + + return &sqlserverflexalpha.CreateUserRequestPayload{ + Username: conversion.StringValueToPointer(model.Username), + DefaultDatabase: conversion.StringValueToPointer(model.DefaultDatabase), + Roles: &roles, + }, nil +} diff --git a/stackit/internal/services/sqlserverflexalpha/user/mapper_test.go b/stackit/internal/services/sqlserverflexalpha/user/mapper_test.go new file mode 100644 index 00000000..c97fee9c --- /dev/null +++ b/stackit/internal/services/sqlserverflexalpha/user/mapper_test.go @@ -0,0 +1,525 @@ +package sqlserverflexalpha + +import ( + "testing" + + "github.com/google/go-cmp/cmp" + "github.com/hashicorp/terraform-plugin-framework/attr" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/stackitcloud/stackit-sdk-go/core/utils" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha" +) + +func TestMapDataSourceFields(t *testing.T) { + const testRegion = "region" + tests := []struct { + description string + input *sqlserverflexalpha.GetUserResponse + region string + expected dataSourceModel + isValid bool + }{ + { + "default_values", + &sqlserverflexalpha.GetUserResponse{}, + testRegion, + dataSourceModel{ + Id: types.StringValue("pid,region,iid,1"), + UserId: types.Int64Value(1), + InstanceId: types.StringValue("iid"), + ProjectId: types.StringValue("pid"), + Username: types.StringNull(), + Roles: types.SetNull(types.StringType), + Host: types.StringNull(), + Port: types.Int64Null(), + Region: types.StringValue(testRegion), + Status: types.StringNull(), + DefaultDatabase: types.StringNull(), + }, + true, + }, + { + "simple_values", + &sqlserverflexalpha.GetUserResponse{ + + Roles: &[]sqlserverflexalpha.UserRole{ + "role_1", + "role_2", + "", + }, + Username: utils.Ptr("username"), + Host: utils.Ptr("host"), + Port: utils.Ptr(int64(1234)), + Status: utils.Ptr("active"), + DefaultDatabase: utils.Ptr("default_db"), + }, + testRegion, + dataSourceModel{ + Id: types.StringValue("pid,region,iid,1"), + UserId: types.Int64Value(1), + InstanceId: types.StringValue("iid"), + ProjectId: types.StringValue("pid"), + Username: types.StringValue("username"), + Roles: types.SetValueMust( + types.StringType, []attr.Value{ + types.StringValue("role_1"), + types.StringValue("role_2"), + types.StringValue(""), + }, + ), + Host: types.StringValue("host"), + Port: types.Int64Value(1234), + Region: types.StringValue(testRegion), + Status: types.StringValue("active"), + DefaultDatabase: types.StringValue("default_db"), + }, + true, + }, + { + "null_fields_and_int_conversions", + &sqlserverflexalpha.GetUserResponse{ + Id: utils.Ptr(int64(1)), + Roles: &[]sqlserverflexalpha.UserRole{}, + Username: nil, + Host: nil, + Port: utils.Ptr(int64(2123456789)), + }, + testRegion, + dataSourceModel{ + Id: types.StringValue("pid,region,iid,1"), + UserId: types.Int64Value(1), + InstanceId: types.StringValue("iid"), + ProjectId: types.StringValue("pid"), + Username: types.StringNull(), + Roles: types.SetValueMust(types.StringType, []attr.Value{}), + Host: types.StringNull(), + Port: types.Int64Value(2123456789), + Region: types.StringValue(testRegion), + }, + true, + }, + { + "nil_response", + nil, + testRegion, + dataSourceModel{}, + false, + }, + { + "nil_response_2", + &sqlserverflexalpha.GetUserResponse{}, + testRegion, + dataSourceModel{}, + false, + }, + { + "no_resource_id", + &sqlserverflexalpha.GetUserResponse{}, + testRegion, + dataSourceModel{}, + false, + }, + } + for _, tt := range tests { + t.Run( + tt.description, func(t *testing.T) { + state := &dataSourceModel{ + ProjectId: tt.expected.ProjectId, + InstanceId: tt.expected.InstanceId, + UserId: tt.expected.UserId, + } + err := mapDataSourceFields(tt.input, state, tt.region) + if !tt.isValid && err == nil { + t.Fatalf("Should have failed") + } + if tt.isValid && err != nil { + t.Fatalf("Should not have failed: %v", err) + } + if tt.isValid { + diff := cmp.Diff(state, &tt.expected) + if diff != "" { + t.Fatalf("Data does not match: %s", diff) + } + } + }, + ) + } +} + +func TestMapFieldsCreate(t *testing.T) { + const testRegion = "region" + tests := []struct { + description string + input *sqlserverflexalpha.CreateUserResponse + region string + expected resourceModel + isValid bool + }{ + { + "default_values", + &sqlserverflexalpha.CreateUserResponse{ + Id: utils.Ptr(int64(1)), + Password: utils.Ptr(""), + }, + testRegion, + resourceModel{ + Id: types.Int64Value(1), + UserId: types.Int64Value(1), + InstanceId: types.StringValue("iid"), + ProjectId: types.StringValue("pid"), + Username: types.StringNull(), + Roles: types.List(types.SetNull(types.StringType)), + Password: types.StringValue(""), + Host: types.StringNull(), + Port: types.Int64Null(), + Region: types.StringValue(testRegion), + }, + true, + }, + { + "simple_values", + &sqlserverflexalpha.CreateUserResponse{ + Id: utils.Ptr(int64(2)), + Roles: &[]sqlserverflexalpha.UserRole{ + "role_1", + "role_2", + "", + }, + Username: utils.Ptr("username"), + Password: utils.Ptr("password"), + Host: utils.Ptr("host"), + Port: utils.Ptr(int64(1234)), + Status: utils.Ptr("status"), + DefaultDatabase: utils.Ptr("default_db"), + }, + testRegion, + resourceModel{ + Id: types.Int64Value(2), + UserId: types.Int64Value(2), + InstanceId: types.StringValue("iid"), + ProjectId: types.StringValue("pid"), + Username: types.StringValue("username"), + Roles: types.List( + types.SetValueMust( + types.StringType, []attr.Value{ + types.StringValue("role_1"), + types.StringValue("role_2"), + types.StringValue(""), + }, + ), + ), + Password: types.StringValue("password"), + Host: types.StringValue("host"), + Port: types.Int64Value(1234), + Region: types.StringValue(testRegion), + Status: types.StringValue("status"), + DefaultDatabase: types.StringValue("default_db"), + }, + true, + }, + { + "null_fields_and_int_conversions", + &sqlserverflexalpha.CreateUserResponse{ + Id: utils.Ptr(int64(3)), + Roles: &[]sqlserverflexalpha.UserRole{}, + Username: nil, + Password: utils.Ptr(""), + Host: nil, + Port: utils.Ptr(int64(2123456789)), + }, + testRegion, + resourceModel{ + Id: types.Int64Value(3), + UserId: types.Int64Value(3), + InstanceId: types.StringValue("iid"), + ProjectId: types.StringValue("pid"), + Username: types.StringNull(), + Roles: types.List(types.SetValueMust(types.StringType, []attr.Value{})), + Password: types.StringValue(""), + Host: types.StringNull(), + Port: types.Int64Value(2123456789), + Region: types.StringValue(testRegion), + DefaultDatabase: types.StringNull(), + Status: types.StringNull(), + }, + true, + }, + { + "nil_response", + nil, + testRegion, + resourceModel{}, + false, + }, + { + "nil_response_2", + &sqlserverflexalpha.CreateUserResponse{}, + testRegion, + resourceModel{}, + false, + }, + { + "no_resource_id", + &sqlserverflexalpha.CreateUserResponse{}, + testRegion, + resourceModel{}, + false, + }, + { + "no_password", + &sqlserverflexalpha.CreateUserResponse{ + Id: utils.Ptr(int64(1)), + }, + testRegion, + resourceModel{}, + false, + }, + } + for _, tt := range tests { + t.Run( + tt.description, func(t *testing.T) { + state := &resourceModel{ + ProjectId: tt.expected.ProjectId, + InstanceId: tt.expected.InstanceId, + } + err := mapFieldsCreate(tt.input, state, tt.region) + if !tt.isValid && err == nil { + t.Fatalf("Should have failed") + } + if tt.isValid && err != nil { + t.Fatalf("Should not have failed: %v", err) + } + if tt.isValid { + diff := cmp.Diff(state, &tt.expected) + if diff != "" { + t.Fatalf("Data does not match: %s", diff) + } + } + }, + ) + } +} + +func TestMapFields(t *testing.T) { + const testRegion = "region" + tests := []struct { + description string + input *sqlserverflexalpha.GetUserResponse + region string + expected resourceModel + isValid bool + }{ + { + "default_values", + &sqlserverflexalpha.GetUserResponse{}, + testRegion, + resourceModel{ + Id: types.Int64Value(1), + UserId: types.Int64Value(1), + InstanceId: types.StringValue("iid"), + ProjectId: types.StringValue("pid"), + Username: types.StringNull(), + Roles: types.List(types.SetNull(types.StringType)), + Host: types.StringNull(), + Port: types.Int64Null(), + Region: types.StringValue(testRegion), + }, + true, + }, + { + "simple_values", + &sqlserverflexalpha.GetUserResponse{ + Roles: &[]sqlserverflexalpha.UserRole{ + "role_1", + "role_2", + "", + }, + Username: utils.Ptr("username"), + Host: utils.Ptr("host"), + Port: utils.Ptr(int64(1234)), + }, + testRegion, + resourceModel{ + Id: types.Int64Value(2), + UserId: types.Int64Value(2), + InstanceId: types.StringValue("iid"), + ProjectId: types.StringValue("pid"), + Username: types.StringValue("username"), + Roles: types.List( + types.SetValueMust( + types.StringType, []attr.Value{ + types.StringValue("role_1"), + types.StringValue("role_2"), + types.StringValue(""), + }, + ), + ), + Host: types.StringValue("host"), + Port: types.Int64Value(1234), + Region: types.StringValue(testRegion), + }, + true, + }, + { + "null_fields_and_int_conversions", + &sqlserverflexalpha.GetUserResponse{ + Id: utils.Ptr(int64(1)), + Roles: &[]sqlserverflexalpha.UserRole{}, + Username: nil, + Host: nil, + Port: utils.Ptr(int64(2123456789)), + }, + testRegion, + resourceModel{ + Id: types.Int64Value(1), + UserId: types.Int64Value(1), + InstanceId: types.StringValue("iid"), + ProjectId: types.StringValue("pid"), + Username: types.StringNull(), + Roles: types.List(types.SetValueMust(types.StringType, []attr.Value{})), + Host: types.StringNull(), + Port: types.Int64Value(2123456789), + Region: types.StringValue(testRegion), + }, + true, + }, + { + "nil_response", + nil, + testRegion, + resourceModel{}, + false, + }, + { + "nil_response_2", + &sqlserverflexalpha.GetUserResponse{}, + testRegion, + resourceModel{}, + false, + }, + { + "no_resource_id", + &sqlserverflexalpha.GetUserResponse{}, + testRegion, + resourceModel{}, + false, + }, + } + for _, tt := range tests { + t.Run( + tt.description, func(t *testing.T) { + state := &resourceModel{ + ProjectId: tt.expected.ProjectId, + InstanceId: tt.expected.InstanceId, + UserId: tt.expected.UserId, + } + err := mapFields(tt.input, state, tt.region) + if !tt.isValid && err == nil { + t.Fatalf("Should have failed") + } + if tt.isValid && err != nil { + t.Fatalf("Should not have failed: %v", err) + } + if tt.isValid { + diff := cmp.Diff(state, &tt.expected) + if diff != "" { + t.Fatalf("Data does not match: %s", diff) + } + } + }, + ) + } +} + +func TestToCreatePayload(t *testing.T) { + tests := []struct { + description string + input *resourceModel + inputRoles []sqlserverflexalpha.UserRole + expected *sqlserverflexalpha.CreateUserRequestPayload + isValid bool + }{ + { + "default_values", + &resourceModel{}, + []sqlserverflexalpha.UserRole{}, + &sqlserverflexalpha.CreateUserRequestPayload{ + Roles: &[]sqlserverflexalpha.UserRole{}, + Username: nil, + }, + true, + }, + { + "default_values", + &resourceModel{ + Username: types.StringValue("username"), + }, + []sqlserverflexalpha.UserRole{ + "role_1", + "role_2", + }, + &sqlserverflexalpha.CreateUserRequestPayload{ + Roles: &[]sqlserverflexalpha.UserRole{ + "role_1", + "role_2", + }, + Username: utils.Ptr("username"), + }, + true, + }, + { + "null_fields_and_int_conversions", + &resourceModel{ + Username: types.StringNull(), + }, + []sqlserverflexalpha.UserRole{ + "", + }, + &sqlserverflexalpha.CreateUserRequestPayload{ + Roles: &[]sqlserverflexalpha.UserRole{ + "", + }, + Username: nil, + }, + true, + }, + { + "nil_model", + nil, + []sqlserverflexalpha.UserRole{}, + nil, + false, + }, + { + "nil_roles", + &resourceModel{ + Username: types.StringValue("username"), + }, + []sqlserverflexalpha.UserRole{}, + &sqlserverflexalpha.CreateUserRequestPayload{ + Roles: &[]sqlserverflexalpha.UserRole{}, + Username: utils.Ptr("username"), + }, + true, + }, + } + for _, tt := range tests { + t.Run( + tt.description, func(t *testing.T) { + output, err := toCreatePayload(tt.input, tt.inputRoles) + if !tt.isValid && err == nil { + t.Fatalf("Should have failed") + } + if tt.isValid && err != nil { + t.Fatalf("Should not have failed: %v", err) + } + if tt.isValid { + diff := cmp.Diff(output, tt.expected) + if diff != "" { + t.Fatalf("Data does not match: %s", diff) + } + } + }, + ) + } +} diff --git a/stackit/internal/services/sqlserverflexalpha/user/resource.go b/stackit/internal/services/sqlserverflexalpha/user/resource.go index a24cad8b..3a95c862 100644 --- a/stackit/internal/services/sqlserverflexalpha/user/resource.go +++ b/stackit/internal/services/sqlserverflexalpha/user/resource.go @@ -9,12 +9,12 @@ import ( "strconv" "strings" + "github.com/hashicorp/terraform-plugin-framework/resource/identityschema" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha" sqlserverflexalphagen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/user/resources_gen" sqlserverflexalphaUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/utils" sqlserverflexalphaWait "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/wait/sqlserverflexalpha" - "github.com/hashicorp/terraform-plugin-framework/attr" "github.com/hashicorp/terraform-plugin-framework/path" "github.com/hashicorp/terraform-plugin-framework/resource" "github.com/hashicorp/terraform-plugin-framework/types" @@ -31,6 +31,7 @@ var ( _ resource.ResourceWithConfigure = &userResource{} _ resource.ResourceWithImportState = &userResource{} _ resource.ResourceWithModifyPlan = &userResource{} + _ resource.ResourceWithIdentity = &userResource{} ) // NewUserResource is a helper function to simplify the provider implementation. @@ -131,6 +132,30 @@ func (r *userResource) Schema(ctx context.Context, _ resource.SchemaRequest, res resp.Schema = s } +// IdentitySchema defines the schema for the resource's identity attributes. +func (r *userResource) IdentitySchema( + _ context.Context, + _ resource.IdentitySchemaRequest, + response *resource.IdentitySchemaResponse, +) { + response.IdentitySchema = identityschema.Schema{ + Attributes: map[string]identityschema.Attribute{ + "project_id": identityschema.StringAttribute{ + RequiredForImport: true, // must be set during import by the practitioner + }, + "region": identityschema.StringAttribute{ + RequiredForImport: true, // can be defaulted by the provider configuration + }, + "instance_id": identityschema.StringAttribute{ + RequiredForImport: true, // can be defaulted by the provider configuration + }, + "user_id": identityschema.Int64Attribute{ + RequiredForImport: true, // can be defaulted by the provider configuration + }, + }, + } +} + // Create creates the resource and sets the initial Terraform state. func (r *userResource) Create( ctx context.Context, @@ -402,109 +427,3 @@ func (r *userResource) ImportState( ) tflog.Info(ctx, "SQLServer Flex user state imported") } - -func mapFieldsCreate(userResp *sqlserverflexalpha.CreateUserResponse, model *resourceModel, region string) error { - if userResp == nil { - return fmt.Errorf("response is nil") - } - if model == nil { - return fmt.Errorf("model input is nil") - } - user := userResp - - if user.Id == nil { - return fmt.Errorf("user id not present") - } - userId := *user.Id - model.Id = types.Int64Value(userId) - model.UserId = types.Int64Value(userId) - model.Username = types.StringPointerValue(user.Username) - - if user.Password == nil { - return fmt.Errorf("user password not present") - } - model.Password = types.StringValue(*user.Password) - - if user.Roles != nil { - var roles []attr.Value - for _, role := range *user.Roles { - roles = append(roles, types.StringValue(string(role))) - } - rolesSet, diags := types.SetValue(types.StringType, roles) - if diags.HasError() { - return fmt.Errorf("failed to map roles: %w", core.DiagsToError(diags)) - } - model.Roles = types.List(rolesSet) - } - - if model.Roles.IsNull() || model.Roles.IsUnknown() { - model.Roles = types.List(types.SetNull(types.StringType)) - } - - model.Host = types.StringPointerValue(user.Host) - model.Port = types.Int64PointerValue(user.Port) - model.Region = types.StringValue(region) - model.Status = types.StringPointerValue(user.Status) - model.DefaultDatabase = types.StringPointerValue(user.DefaultDatabase) - - return nil -} - -func mapFields(userResp *sqlserverflexalpha.GetUserResponse, model *resourceModel, region string) error { - if userResp == nil { - return fmt.Errorf("response is nil") - } - if model == nil { - return fmt.Errorf("model input is nil") - } - user := userResp - - var userId int64 - if model.UserId.ValueInt64() != 0 { - userId = model.UserId.ValueInt64() - } else if user.Id != nil { - userId = *user.Id - } else { - return fmt.Errorf("user id not present") - } - - model.Id = types.Int64Value(userId) - model.UserId = types.Int64Value(userId) - model.Username = types.StringPointerValue(user.Username) - - if user.Roles != nil { - var roles []attr.Value - for _, role := range *user.Roles { - roles = append(roles, types.StringValue(string(role))) - } - rolesSet, diags := types.SetValue(types.StringType, roles) - if diags.HasError() { - return fmt.Errorf("failed to map roles: %w", core.DiagsToError(diags)) - } - model.Roles = types.List(rolesSet) - } - - if model.Roles.IsNull() || model.Roles.IsUnknown() { - model.Roles = types.List(types.SetNull(types.StringType)) - } - - model.Host = types.StringPointerValue(user.Host) - model.Port = types.Int64PointerValue(user.Port) - model.Region = types.StringValue(region) - return nil -} - -func toCreatePayload( - model *resourceModel, - roles []sqlserverflexalpha.UserRole, -) (*sqlserverflexalpha.CreateUserRequestPayload, error) { - if model == nil { - return nil, fmt.Errorf("nil model") - } - - return &sqlserverflexalpha.CreateUserRequestPayload{ - Username: conversion.StringValueToPointer(model.Username), - DefaultDatabase: conversion.StringValueToPointer(model.DefaultDatabase), - Roles: &roles, - }, nil -} diff --git a/stackit/internal/services/sqlserverflexalpha/user/resource_test.go b/stackit/internal/services/sqlserverflexalpha/user/resource_test.go deleted file mode 100644 index 8223e831..00000000 --- a/stackit/internal/services/sqlserverflexalpha/user/resource_test.go +++ /dev/null @@ -1,388 +0,0 @@ -package sqlserverflexalpha - -import ( - "testing" - - "github.com/google/go-cmp/cmp" - "github.com/hashicorp/terraform-plugin-framework/types" - "github.com/stackitcloud/stackit-sdk-go/core/utils" - "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha" -) - -func TestMapFieldsCreate(t *testing.T) { - const testRegion = "region" - tests := []struct { - description string - input *sqlserverflexalpha.CreateUserResponse - region string - expected resourceModel - isValid bool - }{ - //{ - // "default_values", - // &sqlserverflexalpha.CreateUserResponse{ - // Id: utils.Ptr(int64(1)), - // Password: utils.Ptr(""), - // }, - // testRegion, - // resourceModel{ - // Id: types.Int64Value(1), - // UserId: types.Int64Value(1), - // InstanceId: types.StringValue("iid"), - // ProjectId: types.StringValue("pid"), - // Username: types.StringNull(), - // Roles: types.List(types.SetNull(types.StringType)), - // Password: types.StringValue(""), - // Host: types.StringNull(), - // Port: types.Int64Null(), - // Region: types.StringValue(testRegion), - // }, - // true, - //}, - //{ - // "simple_values", - // &sqlserverflexalpha.CreateUserResponse{ - // Id: utils.Ptr(int64(2)), - // Roles: &[]sqlserverflexalpha.UserRole{ - // "role_1", - // "role_2", - // "", - // }, - // Username: utils.Ptr("username"), - // Password: utils.Ptr("password"), - // Host: utils.Ptr("host"), - // Port: utils.Ptr(int64(1234)), - // Status: utils.Ptr("status"), - // DefaultDatabase: utils.Ptr("default_db"), - // }, - // testRegion, - // resourceModel{ - // Id: types.Int64Value(2), - // UserId: types.Int64Value(2), - // InstanceId: types.StringValue("iid"), - // ProjectId: types.StringValue("pid"), - // Username: types.StringValue("username"), - // Roles: types.List( - // types.SetValueMust( - // types.StringType, []attr.Value{ - // types.StringValue("role_1"), - // types.StringValue("role_2"), - // types.StringValue(""), - // }, - // ), - // ), - // Password: types.StringValue("password"), - // Host: types.StringValue("host"), - // Port: types.Int64Value(1234), - // Region: types.StringValue(testRegion), - // Status: types.StringValue("status"), - // DefaultDatabase: types.StringValue("default_db"), - // }, - // true, - //}, - //{ - // "null_fields_and_int_conversions", - // &sqlserverflexalpha.CreateUserResponse{ - // Id: utils.Ptr(int64(3)), - // Roles: &[]sqlserverflexalpha.UserRole{}, - // Username: nil, - // Password: utils.Ptr(""), - // Host: nil, - // Port: utils.Ptr(int64(2123456789)), - // }, - // testRegion, - // resourceModel{ - // Id: types.Int64Value(3), - // UserId: types.Int64Value(3), - // InstanceId: types.StringValue("iid"), - // ProjectId: types.StringValue("pid"), - // Username: types.StringNull(), - // Roles: types.List(types.SetValueMust(types.StringType, []attr.Value{})), - // Password: types.StringValue(""), - // Host: types.StringNull(), - // Port: types.Int64Value(2123456789), - // Region: types.StringValue(testRegion), - // DefaultDatabase: types.StringNull(), - // Status: types.StringNull(), - // }, - // true, - //}, - { - "nil_response", - nil, - testRegion, - resourceModel{}, - false, - }, - { - "nil_response_2", - &sqlserverflexalpha.CreateUserResponse{}, - testRegion, - resourceModel{}, - false, - }, - { - "no_resource_id", - &sqlserverflexalpha.CreateUserResponse{}, - testRegion, - resourceModel{}, - false, - }, - { - "no_password", - &sqlserverflexalpha.CreateUserResponse{ - Id: utils.Ptr(int64(1)), - }, - testRegion, - resourceModel{}, - false, - }, - } - for _, tt := range tests { - t.Run( - tt.description, func(t *testing.T) { - state := &resourceModel{ - ProjectId: tt.expected.ProjectId, - InstanceId: tt.expected.InstanceId, - } - err := mapFieldsCreate(tt.input, state, tt.region) - if !tt.isValid && err == nil { - t.Fatalf("Should have failed") - } - if tt.isValid && err != nil { - t.Fatalf("Should not have failed: %v", err) - } - if tt.isValid { - diff := cmp.Diff(state, &tt.expected) - if diff != "" { - t.Fatalf("Data does not match: %s", diff) - } - } - }, - ) - } -} - -func TestMapFields(t *testing.T) { - const testRegion = "region" - tests := []struct { - description string - input *sqlserverflexalpha.GetUserResponse - region string - expected resourceModel - isValid bool - }{ - //{ - // "default_values", - // &sqlserverflexalpha.GetUserResponse{}, - // testRegion, - // resourceModel{ - // Id: types.Int64Value(1), - // UserId: types.Int64Value(1), - // InstanceId: types.StringValue("iid"), - // ProjectId: types.StringValue("pid"), - // Username: types.StringNull(), - // Roles: types.List(types.SetNull(types.StringType)), - // Host: types.StringNull(), - // Port: types.Int64Null(), - // Region: types.StringValue(testRegion), - // }, - // true, - //}, - //{ - // "simple_values", - // &sqlserverflexalpha.GetUserResponse{ - // Roles: &[]sqlserverflexalpha.UserRole{ - // "role_1", - // "role_2", - // "", - // }, - // Username: utils.Ptr("username"), - // Host: utils.Ptr("host"), - // Port: utils.Ptr(int64(1234)), - // }, - // testRegion, - // resourceModel{ - // Id: types.Int64Value(2), - // UserId: types.Int64Value(2), - // InstanceId: types.StringValue("iid"), - // ProjectId: types.StringValue("pid"), - // Username: types.StringValue("username"), - // Roles: types.List( - // types.SetValueMust( - // types.StringType, []attr.Value{ - // types.StringValue("role_1"), - // types.StringValue("role_2"), - // types.StringValue(""), - // }, - // ), - // ), - // Host: types.StringValue("host"), - // Port: types.Int64Value(1234), - // Region: types.StringValue(testRegion), - // }, - // true, - //}, - //{ - // "null_fields_and_int_conversions", - // &sqlserverflexalpha.GetUserResponse{ - // Id: utils.Ptr(int64(1)), - // Roles: &[]sqlserverflexalpha.UserRole{}, - // Username: nil, - // Host: nil, - // Port: utils.Ptr(int64(2123456789)), - // }, - // testRegion, - // resourceModel{ - // Id: types.Int64Value(1), - // UserId: types.Int64Value(1), - // InstanceId: types.StringValue("iid"), - // ProjectId: types.StringValue("pid"), - // Username: types.StringNull(), - // Roles: types.List(types.SetValueMust(types.StringType, []attr.Value{})), - // Host: types.StringNull(), - // Port: types.Int64Value(2123456789), - // Region: types.StringValue(testRegion), - // }, - // true, - //}, - { - "nil_response", - nil, - testRegion, - resourceModel{}, - false, - }, - { - "nil_response_2", - &sqlserverflexalpha.GetUserResponse{}, - testRegion, - resourceModel{}, - false, - }, - { - "no_resource_id", - &sqlserverflexalpha.GetUserResponse{}, - testRegion, - resourceModel{}, - false, - }, - } - for _, tt := range tests { - t.Run( - tt.description, func(t *testing.T) { - state := &resourceModel{ - ProjectId: tt.expected.ProjectId, - InstanceId: tt.expected.InstanceId, - UserId: tt.expected.UserId, - } - err := mapFields(tt.input, state, tt.region) - if !tt.isValid && err == nil { - t.Fatalf("Should have failed") - } - if tt.isValid && err != nil { - t.Fatalf("Should not have failed: %v", err) - } - if tt.isValid { - diff := cmp.Diff(state, &tt.expected) - if diff != "" { - t.Fatalf("Data does not match: %s", diff) - } - } - }, - ) - } -} - -func TestToCreatePayload(t *testing.T) { - tests := []struct { - description string - input *resourceModel - inputRoles []sqlserverflexalpha.UserRole - expected *sqlserverflexalpha.CreateUserRequestPayload - isValid bool - }{ - { - "default_values", - &resourceModel{}, - []sqlserverflexalpha.UserRole{}, - &sqlserverflexalpha.CreateUserRequestPayload{ - Roles: &[]sqlserverflexalpha.UserRole{}, - Username: nil, - }, - true, - }, - { - "default_values", - &resourceModel{ - Username: types.StringValue("username"), - }, - []sqlserverflexalpha.UserRole{ - "role_1", - "role_2", - }, - &sqlserverflexalpha.CreateUserRequestPayload{ - Roles: &[]sqlserverflexalpha.UserRole{ - "role_1", - "role_2", - }, - Username: utils.Ptr("username"), - }, - true, - }, - { - "null_fields_and_int_conversions", - &resourceModel{ - Username: types.StringNull(), - }, - []sqlserverflexalpha.UserRole{ - "", - }, - &sqlserverflexalpha.CreateUserRequestPayload{ - Roles: &[]sqlserverflexalpha.UserRole{ - "", - }, - Username: nil, - }, - true, - }, - { - "nil_model", - nil, - []sqlserverflexalpha.UserRole{}, - nil, - false, - }, - { - "nil_roles", - &resourceModel{ - Username: types.StringValue("username"), - }, - []sqlserverflexalpha.UserRole{}, - &sqlserverflexalpha.CreateUserRequestPayload{ - Roles: &[]sqlserverflexalpha.UserRole{}, - Username: utils.Ptr("username"), - }, - true, - }, - } - for _, tt := range tests { - t.Run( - tt.description, func(t *testing.T) { - output, err := toCreatePayload(tt.input, tt.inputRoles) - if !tt.isValid && err == nil { - t.Fatalf("Should have failed") - } - if tt.isValid && err != nil { - t.Fatalf("Should not have failed: %v", err) - } - if tt.isValid { - diff := cmp.Diff(output, tt.expected) - if diff != "" { - t.Fatalf("Data does not match: %s", diff) - } - } - }, - ) - } -} diff --git a/stackit/internal/services/sqlserverflexbeta/database/datasource.go b/stackit/internal/services/sqlserverflexbeta/database/datasource.go index 063fe6d9..66d5ad0d 100644 --- a/stackit/internal/services/sqlserverflexbeta/database/datasource.go +++ b/stackit/internal/services/sqlserverflexbeta/database/datasource.go @@ -7,6 +7,7 @@ import ( "github.com/hashicorp/terraform-plugin-framework/datasource" "github.com/hashicorp/terraform-plugin-framework/datasource/schema" + "github.com/hashicorp/terraform-plugin-framework/diag" "github.com/hashicorp/terraform-plugin-framework/types" "github.com/hashicorp/terraform-plugin-log/tflog" "github.com/stackitcloud/stackit-sdk-go/core/config" @@ -28,7 +29,7 @@ func NewDatabaseDataSource() datasource.DataSource { type dataSourceModel struct { sqlserverflexbetaGen.DatabaseModel - TfId types.String `tfsdk:"id"` + TerraformId types.String `tfsdk:"id"` } type databaseDataSource struct { @@ -97,8 +98,6 @@ func (d *databaseDataSource) Configure( func (d *databaseDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { var data dataSourceModel - readErr := "Read DB error" - // Read Terraform configuration data into the model resp.Diagnostics.Append(req.Config.Get(ctx, &data)...) @@ -108,6 +107,7 @@ func (d *databaseDataSource) Read(ctx context.Context, req datasource.ReadReques ctx = core.InitProviderContext(ctx) + // Extract identifiers from the plan projectId := data.ProjectId.ValueString() region := d.providerData.GetRegionWithOverride(data.Region) instanceId := data.InstanceId.ValueString() @@ -120,44 +120,55 @@ func (d *databaseDataSource) Read(ctx context.Context, req datasource.ReadReques databaseResp, err := d.client.GetDatabaseRequest(ctx, projectId, region, instanceId, databaseName).Execute() if err != nil { - utils.LogError( - ctx, - &resp.Diagnostics, - err, - "Reading database", - fmt.Sprintf("database with %q does not exist in project %q.", databaseName, projectId), - map[int]string{ - http.StatusForbidden: fmt.Sprintf("Project with %q not found or forbidden access", projectId), - }, - ) + handleReadError(ctx, &resp.Diagnostics, err, projectId, instanceId) resp.State.RemoveResource(ctx) return } ctx = core.LogResponse(ctx) - - dbId, ok := databaseResp.GetIdOk() - if !ok { + // Map response body to schema and populate Computed attribute values + err = mapFields(databaseResp, &data, region) + if err != nil { core.LogAndAddError( ctx, &resp.Diagnostics, - readErr, - "Database creation waiting: returned id is nil", + "Error reading database", + fmt.Sprintf("Processing API payload: %v", err), ) return } - data.Id = types.Int64Value(dbId) - data.TfId = utils.BuildInternalTerraformId(projectId, region, instanceId, databaseName) - data.Owner = types.StringValue(databaseResp.GetOwner()) - data.CollationName = types.StringValue(databaseResp.GetCollationName()) - data.CompatibilityLevel = types.Int64Value(databaseResp.GetCompatibilityLevel()) - data.DatabaseName = types.StringValue(databaseResp.GetName()) - // data.InstanceId = types.StringValue(databaseResp.GetInstanceId()) - // data.Name = types.Sometype(apiResponse.GetName()) - // data.ProjectId = types.Sometype(apiResponse.GetProjectId()) - // data.Region = types.Sometype(apiResponse.GetRegion()) - // Save data into Terraform state resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) + + tflog.Info(ctx, "SQL Server Flex beta database read") + +} + +// handleReadError centralizes API error handling for the Read operation. +func handleReadError(ctx context.Context, diags *diag.Diagnostics, err error, projectId, instanceId string) { + utils.LogError( + ctx, + diags, + err, + "Reading database", + fmt.Sprintf( + "Could not retrieve database for instance %q in project %q.", + instanceId, + projectId, + ), + map[int]string{ + http.StatusBadRequest: fmt.Sprintf( + "Invalid request parameters for project %q and instance %q.", + projectId, + instanceId, + ), + http.StatusNotFound: fmt.Sprintf( + "Database, instance %q, or project %q not found.", + instanceId, + projectId, + ), + http.StatusForbidden: fmt.Sprintf("Forbidden access to project %q.", projectId), + }, + ) } diff --git a/stackit/internal/services/sqlserverflexbeta/database/mapper.go b/stackit/internal/services/sqlserverflexbeta/database/mapper.go new file mode 100644 index 00000000..51772af1 --- /dev/null +++ b/stackit/internal/services/sqlserverflexbeta/database/mapper.go @@ -0,0 +1,97 @@ +package sqlserverflexbeta + +import ( + "fmt" + "strings" + + "github.com/hashicorp/terraform-plugin-framework/types" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" +) + +// mapFields maps fields from a ListDatabase API response to a resourceModel for the data source. +func mapFields(source *sqlserverflexbeta.GetDatabaseResponse, model *dataSourceModel, region string) error { + if source == nil { + return fmt.Errorf("response is nil") + } + if source.Id == nil || *source.Id == 0 { + return fmt.Errorf("id not present") + } + if model == nil { + return fmt.Errorf("model given is nil") + } + + var databaseId int64 + if model.Id.ValueInt64() != 0 { + databaseId = model.Id.ValueInt64() + } else if source.Id != nil { + databaseId = *source.Id + } else { + return fmt.Errorf("database id not present") + } + + model.Id = types.Int64Value(databaseId) + model.DatabaseName = types.StringValue(source.GetName()) + model.Name = types.StringValue(source.GetName()) + model.Owner = types.StringValue(strings.Trim(source.GetOwner(), "\"")) + model.Region = types.StringValue(region) + model.ProjectId = types.StringValue(model.ProjectId.ValueString()) + model.InstanceId = types.StringValue(model.InstanceId.ValueString()) + model.CompatibilityLevel = types.Int64Value(source.GetCompatibilityLevel()) + model.CollationName = types.StringValue(source.GetCollationName()) + + model.TerraformId = utils.BuildInternalTerraformId( + model.ProjectId.ValueString(), + region, + model.InstanceId.ValueString(), + model.DatabaseName.ValueString(), + ) + + return nil +} + +// mapResourceFields maps fields from a ListDatabase API response to a resourceModel for the resource. +func mapResourceFields(source *sqlserverflexbeta.GetDatabaseResponse, model *resourceModel, region string) error { + if source == nil { + return fmt.Errorf("response is nil") + } + if source.Id == nil || *source.Id == 0 { + return fmt.Errorf("id not present") + } + if model == nil { + return fmt.Errorf("model input is nil") + } + + var databaseId int64 + if model.Id.ValueInt64() != 0 { + databaseId = model.Id.ValueInt64() + } else if source.Id != nil { + databaseId = *source.Id + } else { + return fmt.Errorf("database id not present") + } + + model.Id = types.Int64Value(databaseId) + model.DatabaseName = types.StringValue(source.GetName()) + model.Name = types.StringValue(source.GetName()) + model.Owner = types.StringValue(strings.Trim(source.GetOwner(), "\"")) + model.Region = types.StringValue(region) + model.ProjectId = types.StringValue(model.ProjectId.ValueString()) + model.InstanceId = types.StringValue(model.InstanceId.ValueString()) + + return nil +} + +// toCreatePayload converts the resource model to an API create payload. +func toCreatePayload(model *resourceModel) (*sqlserverflexbeta.CreateDatabaseRequestPayload, error) { + if model == nil { + return nil, fmt.Errorf("nil model") + } + + return &sqlserverflexbeta.CreateDatabaseRequestPayload{ + Name: model.Name.ValueStringPointer(), + Owner: model.Owner.ValueStringPointer(), + Collation: model.Collation.ValueStringPointer(), + Compatibility: model.Compatibility.ValueInt64Pointer(), + }, nil +} diff --git a/stackit/internal/services/sqlserverflexbeta/database/mapper_test.go b/stackit/internal/services/sqlserverflexbeta/database/mapper_test.go new file mode 100644 index 00000000..04125a7b --- /dev/null +++ b/stackit/internal/services/sqlserverflexbeta/database/mapper_test.go @@ -0,0 +1,227 @@ +package sqlserverflexbeta + +import ( + "testing" + + "github.com/google/go-cmp/cmp" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/stackitcloud/stackit-sdk-go/core/utils" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta" + datasource "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/database/datasources_gen" +) + +func TestMapFields(t *testing.T) { + type given struct { + source *sqlserverflexbeta.GetDatabaseResponse + model *dataSourceModel + region string + } + type expected struct { + model *dataSourceModel + err bool + } + + testcases := []struct { + name string + given given + expected expected + }{ + { + name: "should map fields correctly", + given: given{ + source: &sqlserverflexbeta.GetDatabaseResponse{ + Id: utils.Ptr(int64(1)), + Name: utils.Ptr("my-db"), + CollationName: utils.Ptr("collation"), + CompatibilityLevel: utils.Ptr(int64(150)), + Owner: utils.Ptr("\"my-owner\""), + }, + model: &dataSourceModel{ + DatabaseModel: datasource.DatabaseModel{ + ProjectId: types.StringValue("my-project"), + InstanceId: types.StringValue("my-instance"), + }, + }, + region: "eu01", + }, + expected: expected{ + model: &dataSourceModel{ + DatabaseModel: datasource.DatabaseModel{ + Id: types.Int64Value(1), + Name: types.StringValue("my-db"), + DatabaseName: types.StringValue("my-db"), + Owner: types.StringValue("my-owner"), + Region: types.StringValue("eu01"), + InstanceId: types.StringValue("my-instance"), + ProjectId: types.StringValue("my-project"), + CompatibilityLevel: types.Int64Value(150), + CollationName: types.StringValue("collation"), + }, + TerraformId: types.StringValue("my-project,eu01,my-instance,my-db"), + }, + }, + }, + { + name: "should fail on nil source", + given: given{ + source: nil, + model: &dataSourceModel{}, + }, + expected: expected{err: true}, + }, + { + name: "should fail on nil source ID", + given: given{ + source: &sqlserverflexbeta.GetDatabaseResponse{Id: nil}, + model: &dataSourceModel{}, + }, + expected: expected{err: true}, + }, + { + name: "should fail on nil model", + given: given{ + source: &sqlserverflexbeta.GetDatabaseResponse{Id: utils.Ptr(int64(1))}, + model: nil, + }, + expected: expected{err: true}, + }, + } + + for _, tc := range testcases { + t.Run( + tc.name, func(t *testing.T) { + err := mapFields(tc.given.source, tc.given.model, tc.given.region) + if (err != nil) != tc.expected.err { + t.Fatalf("expected error: %v, got: %v", tc.expected.err, err) + } + if err == nil { + if diff := cmp.Diff(tc.expected.model, tc.given.model); diff != "" { + t.Errorf("model mismatch (-want +got):\n%s", diff) + } + } + }, + ) + } +} + +func TestMapResourceFields(t *testing.T) { + type given struct { + source *sqlserverflexbeta.GetDatabaseResponse + model *resourceModel + region string + } + type expected struct { + model *resourceModel + err bool + } + + testcases := []struct { + name string + given given + expected expected + }{ + { + name: "should map fields correctly", + given: given{ + source: &sqlserverflexbeta.GetDatabaseResponse{ + Id: utils.Ptr(int64(1)), + Name: utils.Ptr("my-db"), + Owner: utils.Ptr("\"my-owner\""), + }, + model: &resourceModel{ + ProjectId: types.StringValue("my-project"), + InstanceId: types.StringValue("my-instance"), + }, + region: "eu01", + }, + expected: expected{ + model: &resourceModel{ + Id: types.Int64Value(1), + Name: types.StringValue("my-db"), + DatabaseName: types.StringValue("my-db"), + InstanceId: types.StringValue("my-instance"), + ProjectId: types.StringValue("my-project"), + Region: types.StringValue("eu01"), + Owner: types.StringValue("my-owner"), + }, + }, + }, + { + name: "should fail on nil source", + given: given{ + source: nil, + model: &resourceModel{}, + }, + expected: expected{err: true}, + }, + } + + for _, tc := range testcases { + t.Run( + tc.name, func(t *testing.T) { + err := mapResourceFields(tc.given.source, tc.given.model, tc.given.region) + if (err != nil) != tc.expected.err { + t.Fatalf("expected error: %v, got: %v", tc.expected.err, err) + } + if err == nil { + if diff := cmp.Diff(tc.expected.model, tc.given.model); diff != "" { + t.Errorf("model mismatch (-want +got):\n%s", diff) + } + } + }, + ) + } +} + +func TestToCreatePayload(t *testing.T) { + type given struct { + model *resourceModel + } + type expected struct { + payload *sqlserverflexbeta.CreateDatabaseRequestPayload + err bool + } + + testcases := []struct { + name string + given given + expected expected + }{ + { + name: "should convert model to payload", + given: given{ + model: &resourceModel{ + Name: types.StringValue("my-db"), + Owner: types.StringValue("my-owner"), + }, + }, + expected: expected{ + payload: &sqlserverflexbeta.CreateDatabaseRequestPayload{ + Name: utils.Ptr("my-db"), + Owner: utils.Ptr("my-owner"), + }, + }, + }, + { + name: "should fail on nil model", + given: given{model: nil}, + expected: expected{err: true}, + }, + } + + for _, tc := range testcases { + t.Run( + tc.name, func(t *testing.T) { + actual, err := toCreatePayload(tc.given.model) + if (err != nil) != tc.expected.err { + t.Fatalf("expected error: %v, got: %v", tc.expected.err, err) + } + if err == nil { + if diff := cmp.Diff(tc.expected.payload, actual); diff != "" { + t.Errorf("payload mismatch (-want +got):\n%s", diff) + } + } + }, + ) + } +} diff --git a/stackit/internal/services/sqlserverflexbeta/database/resource.go b/stackit/internal/services/sqlserverflexbeta/database/resource.go index 9c052dbb..8e09975b 100644 --- a/stackit/internal/services/sqlserverflexbeta/database/resource.go +++ b/stackit/internal/services/sqlserverflexbeta/database/resource.go @@ -3,9 +3,10 @@ package sqlserverflexbeta import ( "context" _ "embed" + "errors" "fmt" + "net/http" "strings" - "time" "github.com/hashicorp/terraform-plugin-framework/path" "github.com/hashicorp/terraform-plugin-framework/resource" @@ -13,10 +14,9 @@ import ( "github.com/hashicorp/terraform-plugin-framework/types" "github.com/hashicorp/terraform-plugin-log/tflog" "github.com/stackitcloud/stackit-sdk-go/core/config" - "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" - wait "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/wait/sqlserverflexbeta" - + "github.com/stackitcloud/stackit-sdk-go/core/oapierror" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" @@ -30,6 +30,13 @@ var ( _ resource.ResourceWithImportState = &databaseResource{} _ resource.ResourceWithModifyPlan = &databaseResource{} _ resource.ResourceWithIdentity = &databaseResource{} + + // Define errors + errDatabaseNotFound = errors.New("database not found") + + // Error message constants + extractErrorSummary = "extracting failed" + extractErrorMessage = "Extracting identity data: %v" ) func NewDatabaseResource() resource.Resource { @@ -192,18 +199,24 @@ func (r *databaseResource) Create(ctx context.Context, req resource.CreateReques return } - ctx = core.LogResponse(ctx) - createId, ok := createResp.GetIdOk() - if !ok { + if createResp == nil || createResp.Id == nil { core.LogAndAddError( ctx, &resp.Diagnostics, - createErr, - fmt.Sprintf("Calling API: %v", err), + "Error creating database", + "API didn't return database Id. A database might have been created", ) + return } - waitResp, err := wait.CreateDatabaseWaitHandler( + databaseId := *createResp.Id + + ctx = tflog.SetField(ctx, "database_id", databaseId) + + ctx = core.LogResponse(ctx) + + // TODO: is this neccessary to wait for the database-> API say 200 ? + /*waitResp, err := wait.CreateDatabaseWaitHandler( ctx, r.client, projectId, @@ -263,23 +276,58 @@ func (r *databaseResource) Create(ctx context.Context, req resource.CreateReques "Database creation waiting: returned name is different", ) return + }*/ + + database, err := r.client.GetDatabaseRequest(ctx, projectId, region, instanceId, databaseName).Execute() + if err != nil { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + "Error creating database", + fmt.Sprintf("Getting database details after creation: %v", err), + ) + return } - data.Id = types.Int64PointerValue(waitResp.Id) - data.Name = types.StringPointerValue(waitResp.Name) + // Map response body to schema + err = mapResourceFields(database, &data, region) + if err != nil { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + "Error creating database", + fmt.Sprintf("Processing API payload: %v", err), + ) + return + } + + // Set data returned by API in identity + identity := DatabaseResourceIdentityModel{ + ProjectID: types.StringValue(projectId), + Region: types.StringValue(region), + InstanceID: types.StringValue(instanceId), + DatabaseName: types.StringValue(databaseName), + } + resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...) + if resp.Diagnostics.HasError() { + return + } + + // Set state to fully populated data + resp.Diagnostics.Append(resp.State.Set(ctx, data)...) + if resp.Diagnostics.HasError() { + return + } // Save data into Terraform state - resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) tflog.Info(ctx, "sqlserverflexbeta.Database created") } func (r *databaseResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) { - var data resourceModel - readErr := "[Database Read]" - - // Read Terraform prior state data into the model - resp.Diagnostics.Append(req.State.Get(ctx, &data)...) + var model resourceModel + diags := req.State.Get(ctx, &model) + resp.Diagnostics.Append(diags...) if resp.Diagnostics.HasError() { return } @@ -293,78 +341,66 @@ func (r *databaseResource) Read(ctx context.Context, req resource.ReadRequest, r ctx = core.InitProviderContext(ctx) - projectId := identityData.ProjectID.ValueString() - region := identityData.Region.ValueString() - ctx = tflog.SetField(ctx, "project_id", projectId) - ctx = tflog.SetField(ctx, "region", region) - - instanceId := identityData.InstanceID.ValueString() - ctx = tflog.SetField(ctx, "instance_id", instanceId) - - dbName := identityData.DatabaseName.ValueString() - ctx = tflog.SetField(ctx, "database_name", dbName) - - getResp, err := r.client.GetDatabaseRequest(ctx, projectId, region, instanceId, dbName).Execute() - if err != nil { + projectId, instanceId, region, databaseName, errExt := r.extractIdentityData(model, identityData) + if errExt != nil { core.LogAndAddError( ctx, &resp.Diagnostics, - readErr, - fmt.Sprintf("Calling API: %v", err), + extractErrorSummary, + fmt.Sprintf(extractErrorMessage, errExt), ) + } + + ctx = tflog.SetField(ctx, "project_id", projectId) + ctx = tflog.SetField(ctx, "instance_id", instanceId) + ctx = tflog.SetField(ctx, "region", region) + ctx = tflog.SetField(ctx, "database_name", databaseName) + + databaseResp, err := r.client.GetDatabaseRequest(ctx, projectId, region, instanceId, databaseName).Execute() + if err != nil { + oapiErr, ok := err.(*oapierror.GenericOpenAPIError) //nolint:errorlint //complaining that error.As should be used to catch wrapped errors, but this error should not be wrapped + if (ok && oapiErr.StatusCode == http.StatusNotFound) || errors.Is(err, errDatabaseNotFound) { + resp.State.RemoveResource(ctx) + return + } + core.LogAndAddError(ctx, &resp.Diagnostics, "Error reading database", fmt.Sprintf("Calling API: %v", err)) return } ctx = core.LogResponse(ctx) - data.Id = types.Int64Value(getResp.GetId()) - data.Owner = types.StringValue(getResp.GetOwner()) - data.Name = types.StringValue(getResp.GetName()) - data.DatabaseName = types.StringValue(getResp.GetName()) - data.CollationName = types.StringValue(getResp.GetCollationName()) - data.CompatibilityLevel = types.Int64Value(getResp.GetCompatibilityLevel()) + // Map response body to schema + err = mapResourceFields(databaseResp, &model, region) + if err != nil { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + "Error reading database", + fmt.Sprintf("Processing API payload: %v", err), + ) + return + } + + // Set refreshed state + diags = resp.State.Set(ctx, model) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } - // Save updated data into Terraform state - resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) tflog.Info(ctx, "sqlserverflexbeta.Database read") } func (r *databaseResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) { - var data resourceModel - - // Read Terraform prior state data into the model - resp.Diagnostics.Append(req.State.Get(ctx, &data)...) - if resp.Diagnostics.HasError() { - return - } - - // Read identity data - var identityData DatabaseResourceIdentityModel - resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...) - if resp.Diagnostics.HasError() { - return - } - - ctx = core.InitProviderContext(ctx) - - projectId := identityData.ProjectID.ValueString() - region := identityData.Region.ValueString() - ctx = tflog.SetField(ctx, "project_id", projectId) - ctx = tflog.SetField(ctx, "region", region) - - // Todo: Update API call logic - - // Save updated data into Terraform state - resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) - - tflog.Info(ctx, "sqlserverflexbeta.Database updated") + // TODO: Check update api endpoint - not available at the moment, so return an error for now + core.LogAndAddError(ctx, &resp.Diagnostics, "Error updating database", "Database can't be updated") } func (r *databaseResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) { - var data resourceModel - - // Read Terraform prior state data into the model - resp.Diagnostics.Append(req.State.Get(ctx, &data)...) + // nolint:gocritic // function signature required by Terraform + var model resourceModel + diags := req.State.Get(ctx, &model) + resp.Diagnostics.Append(diags...) if resp.Diagnostics.HasError() { return } @@ -378,12 +414,28 @@ func (r *databaseResource) Delete(ctx context.Context, req resource.DeleteReques ctx = core.InitProviderContext(ctx) - projectId := identityData.ProjectID.ValueString() - region := identityData.Region.ValueString() - ctx = tflog.SetField(ctx, "project_id", projectId) - ctx = tflog.SetField(ctx, "region", region) + projectId, instanceId, region, databaseName, errExt := r.extractIdentityData(model, identityData) + if errExt != nil { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + extractErrorSummary, + fmt.Sprintf(extractErrorMessage, errExt), + ) + } - // Todo: Delete API call logic + ctx = tflog.SetField(ctx, "project_id", projectId) + ctx = tflog.SetField(ctx, "instance_id", instanceId) + ctx = tflog.SetField(ctx, "region", region) + ctx = tflog.SetField(ctx, "database_name", databaseName) + + // Delete existing record set + err := r.client.DeleteDatabaseRequestExecute(ctx, projectId, region, instanceId, databaseName) + if err != nil { + core.LogAndAddError(ctx, &resp.Diagnostics, "Error deleting database", fmt.Sprintf("Calling API: %v", err)) + } + + ctx = core.LogResponse(ctx) tflog.Info(ctx, "sqlserverflexbeta.Database deleted") } @@ -504,3 +556,46 @@ func (r *databaseResource) ImportState( tflog.Info(ctx, "Sqlserverflexbeta database state imported") } + +// extractIdentityData extracts essential identifiers from the resource model, falling back to the identity model. +func (r *databaseResource) extractIdentityData( + model resourceModel, + identity DatabaseResourceIdentityModel, +) (projectId, region, instanceId, databaseName string, err error) { + if !model.DatabaseName.IsNull() && !model.DatabaseName.IsUnknown() { + databaseName = model.DatabaseName.ValueString() + } else { + if identity.DatabaseName.IsNull() || identity.DatabaseName.IsUnknown() { + return "", "", "", "", fmt.Errorf("database_name not found in config") + } + databaseName = identity.DatabaseName.ValueString() + } + + if !model.ProjectId.IsNull() && !model.ProjectId.IsUnknown() { + projectId = model.ProjectId.ValueString() + } else { + if identity.ProjectID.IsNull() || identity.ProjectID.IsUnknown() { + return "", "", "", "", fmt.Errorf("project_id not found in config") + } + projectId = identity.ProjectID.ValueString() + } + + if !model.Region.IsNull() && !model.Region.IsUnknown() { + region = r.providerData.GetRegionWithOverride(model.Region) + } else { + if identity.Region.IsNull() || identity.Region.IsUnknown() { + return "", "", "", "", fmt.Errorf("region not found in config") + } + region = r.providerData.GetRegionWithOverride(identity.Region) + } + + if !model.InstanceId.IsNull() && !model.InstanceId.IsUnknown() { + instanceId = model.InstanceId.ValueString() + } else { + if identity.InstanceID.IsNull() || identity.InstanceID.IsUnknown() { + return "", "", "", "", fmt.Errorf("instance_id not found in config") + } + instanceId = identity.InstanceID.ValueString() + } + return projectId, region, instanceId, databaseName, nil +} diff --git a/stackit/internal/services/sqlserverflexbeta/user/datasource.go b/stackit/internal/services/sqlserverflexbeta/user/datasource.go index e6491a0f..e15ad28f 100644 --- a/stackit/internal/services/sqlserverflexbeta/user/datasource.go +++ b/stackit/internal/services/sqlserverflexbeta/user/datasource.go @@ -8,13 +8,12 @@ import ( "github.com/hashicorp/terraform-plugin-framework/datasource" "github.com/hashicorp/terraform-plugin-framework/types" "github.com/hashicorp/terraform-plugin-log/tflog" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" + sqlserverflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/utils" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" sqlserverflexbetaPkg "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta" - - // sqlserverflexbetaUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/utils" - sqlserverflexbetaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/user/datasources_gen" ) @@ -29,7 +28,7 @@ func NewUserDataSource() datasource.DataSource { type dataSourceModel struct { DefaultDatabase types.String `tfsdk:"default_database"` Host types.String `tfsdk:"host"` - Id types.Int64 `tfsdk:"id"` + Id types.String `tfsdk:"id"` InstanceId types.String `tfsdk:"instance_id"` Port types.Int64 `tfsdk:"port"` ProjectId types.String `tfsdk:"project_id"` @@ -63,50 +62,52 @@ func (d *userDataSource) Configure( req datasource.ConfigureRequest, resp *datasource.ConfigureResponse, ) { - //var ok bool - //d.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics) - //if !ok { - // return - //} - // - //apiClient := sqlserverflexbetaUtils.ConfigureClient(ctx, &d.providerData, &resp.Diagnostics) - //if resp.Diagnostics.HasError() { - // return - //} - //d.client = apiClient - tflog.Info(ctx, fmt.Sprintf("%s client configured", errorPrefix)) + var ok bool + d.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics) + if !ok { + return + } + + apiClient := sqlserverflexUtils.ConfigureClient(ctx, &d.providerData, &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } + d.client = apiClient + tflog.Info(ctx, "SQL SERVER Flex alpha database client configured") } func (d *userDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { - var data dataSourceModel - - // Read Terraform configuration data into the model - resp.Diagnostics.Append(req.Config.Get(ctx, &data)...) - + var model dataSourceModel + diags := req.Config.Get(ctx, &model) + resp.Diagnostics.Append(diags...) if resp.Diagnostics.HasError() { return } ctx = core.InitProviderContext(ctx) - projectId := data.ProjectId.ValueString() - region := d.providerData.GetRegionWithOverride(data.Region) - instanceId := data.InstanceId.ValueString() - userId := data.UserId.ValueInt64() - + projectId := model.ProjectId.ValueString() + instanceId := model.InstanceId.ValueString() + userId := model.UserId.ValueInt64() + region := d.providerData.GetRegionWithOverride(model.Region) ctx = tflog.SetField(ctx, "project_id", projectId) - ctx = tflog.SetField(ctx, "region", region) ctx = tflog.SetField(ctx, "instance_id", instanceId) ctx = tflog.SetField(ctx, "user_id", userId) + ctx = tflog.SetField(ctx, "region", region) - userResp, err := d.client.GetUserRequest(ctx, projectId, region, instanceId, userId).Execute() + recordSetResp, err := d.client.GetUserRequest(ctx, projectId, region, instanceId, userId).Execute() if err != nil { utils.LogError( ctx, &resp.Diagnostics, err, "Reading user", - fmt.Sprintf("user with ID %q does not exist in project %q.", userId, projectId), + fmt.Sprintf( + "User with ID %q or instance with ID %q does not exist in project %q.", + userId, + instanceId, + projectId, + ), map[int]string{ http.StatusForbidden: fmt.Sprintf("Project with ID %q not found or forbidden access", projectId), }, @@ -117,22 +118,23 @@ func (d *userDataSource) Read(ctx context.Context, req datasource.ReadRequest, r ctx = core.LogResponse(ctx) - // Todo: Read API call logic - - // Example data value setting - // data.Id = types.StringValue("example-id") - - err = mapResponseToModel(ctx, userResp, &data, resp.Diagnostics) + // Map response body to schema and populate Computed attribute values + err = mapDataSourceFields(recordSetResp, &model, region) if err != nil { core.LogAndAddError( ctx, &resp.Diagnostics, - fmt.Sprintf("%s Read", errorPrefix), + "Error reading user", fmt.Sprintf("Processing API payload: %v", err), ) return } - // Save data into Terraform state - resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) + // Set refreshed state + diags = resp.State.Set(ctx, model) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + tflog.Info(ctx, "SQLServer Flex beta instance read") } diff --git a/stackit/internal/services/sqlserverflexbeta/user/functions.go b/stackit/internal/services/sqlserverflexbeta/user/functions.go deleted file mode 100644 index 37e297c5..00000000 --- a/stackit/internal/services/sqlserverflexbeta/user/functions.go +++ /dev/null @@ -1,98 +0,0 @@ -package sqlserverflexbeta - -import ( - "context" - "fmt" - - "github.com/hashicorp/terraform-plugin-framework/diag" - "github.com/hashicorp/terraform-plugin-framework/types" - - "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta" -) - -func mapResponseToModel( - ctx context.Context, - resp *sqlserverflexbeta.GetUserResponse, - m *dataSourceModel, - tfDiags diag.Diagnostics, -) error { - if resp == nil { - return fmt.Errorf("response is nil") - } - - m.Id = types.Int64Value(resp.GetId()) - m.UserId = types.Int64Value(resp.GetId()) - m.Username = types.StringValue(resp.GetUsername()) - m.Port = types.Int64Value(resp.GetPort()) - m.Host = types.StringValue(resp.GetHost()) - m.DefaultDatabase = types.StringValue(resp.GetDefaultDatabase()) - m.Status = types.StringValue(resp.GetStatus()) - - if resp.Roles != nil { - roles, diags := types.ListValueFrom(ctx, types.StringType, *resp.Roles) - tfDiags.Append(diags...) - if tfDiags.HasError() { - return fmt.Errorf("failed to map roles") - } - m.Roles = roles - } else { - m.Roles = types.ListNull(types.StringType) - } - - if resp.Status != nil { - m.Status = types.StringValue(*resp.Status) - } else { - m.Status = types.StringNull() - } - - // TODO: complete and refactor - - /* - sampleList, diags := types.ListValueFrom(ctx, types.StringType, resp.GetList()) - tfDiags.Append(diags...) - if diags.HasError() { - return fmt.Errorf( - "error converting list response value", - ) - } - sample, diags := sqlserverflexbetaResGen.NewSampleValue( - sqlserverflexbetaResGen.SampleValue{}.AttributeTypes(ctx), - map[string]attr.Value{ - "field": types.StringValue(string(resp.GetField())), - }, - ) - tfDiags.Append(diags...) - if diags.HasError() { - return fmt.Errorf( - "error converting sample response value", - "sample", - types.StringValue(string(resp.GetField())), - ) - } - m.Sample = sample - */ - return nil -} - -//func toCreatePayload( -// ctx context.Context, -// model *resourceModel, -//) (*sqlserverflexbeta.CreateUserRequestPayload, error) { -// if model == nil { -// return nil, fmt.Errorf("nil model") -// } -// -// var roles []sqlserverflexbeta.UserRole -// if !model.Roles.IsNull() && !model.Roles.IsUnknown() { -// diags := model.Roles.ElementsAs(ctx, &roles, false) -// if diags.HasError() { -// return nil, fmt.Errorf("failed to convert roles: %v", diags) -// } -// } -// -// return &sqlserverflexbeta.CreateUserRequestPayload{ -// DefaultDatabase: model.DefaultDatabase.ValueStringPointer(), -// Username: model.Username.ValueStringPointer(), -// Roles: &roles, -// }, nil -//} diff --git a/stackit/internal/services/sqlserverflexbeta/user/mapper.go b/stackit/internal/services/sqlserverflexbeta/user/mapper.go new file mode 100644 index 00000000..d2324058 --- /dev/null +++ b/stackit/internal/services/sqlserverflexbeta/user/mapper.go @@ -0,0 +1,179 @@ +package sqlserverflexbeta + +import ( + "fmt" + "strconv" + + "github.com/hashicorp/terraform-plugin-framework/attr" + "github.com/hashicorp/terraform-plugin-framework/types" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" +) + +// mapDataSourceFields maps the API response to a dataSourceModel. +func mapDataSourceFields(userResp *sqlserverflexbeta.GetUserResponse, model *dataSourceModel, region string) error { + if userResp == nil { + return fmt.Errorf("response is nil") + } + if model == nil { + return fmt.Errorf("model input is nil") + } + user := userResp + + // Handle user ID + var userId int64 + if model.UserId.ValueInt64() != 0 { + userId = model.UserId.ValueInt64() + } else if user.Id != nil { + userId = *user.Id + } else { + return fmt.Errorf("user id not present") + } + + // Set main attributes + model.Id = utils.BuildInternalTerraformId( + model.ProjectId.ValueString(), region, model.InstanceId.ValueString(), strconv.FormatInt(userId, 10), + ) + model.UserId = types.Int64Value(userId) + model.Username = types.StringPointerValue(user.Username) + + // Map roles + if user.Roles == nil { + model.Roles = types.List(types.SetNull(types.StringType)) + } else { + var roles []attr.Value + for _, role := range *user.Roles { + roles = append(roles, types.StringValue(string(role))) + } + rolesSet, diags := types.SetValue(types.StringType, roles) + if diags.HasError() { + return fmt.Errorf("failed to map roles: %w", core.DiagsToError(diags)) + } + model.Roles = types.List(rolesSet) + } + + // Set remaining attributes + model.Host = types.StringPointerValue(user.Host) + model.Port = types.Int64PointerValue(user.Port) + model.Region = types.StringValue(region) + model.Status = types.StringPointerValue(user.Status) + model.DefaultDatabase = types.StringPointerValue(user.DefaultDatabase) + + return nil +} + +// mapFields maps the API response to a resourceModel. +func mapFields(userResp *sqlserverflexbeta.GetUserResponse, model *resourceModel, region string) error { + if userResp == nil { + return fmt.Errorf("response is nil") + } + if model == nil { + return fmt.Errorf("model input is nil") + } + user := userResp + + // Handle user ID + var userId int64 + if model.UserId.ValueInt64() != 0 { + userId = model.UserId.ValueInt64() + } else if user.Id != nil { + userId = *user.Id + } else { + return fmt.Errorf("user id not present") + } + + // Set main attributes + model.Id = types.Int64Value(userId) + model.UserId = types.Int64Value(userId) + model.Username = types.StringPointerValue(user.Username) + + // Map roles + if user.Roles != nil { + var roles []attr.Value + for _, role := range *user.Roles { + roles = append(roles, types.StringValue(string(role))) + } + rolesSet, diags := types.SetValue(types.StringType, roles) + if diags.HasError() { + return fmt.Errorf("failed to map roles: %w", core.DiagsToError(diags)) + } + model.Roles = types.List(rolesSet) + } + + // Ensure roles is not null + if model.Roles.IsNull() || model.Roles.IsUnknown() { + model.Roles = types.List(types.SetNull(types.StringType)) + } + + // Set connection details + model.Host = types.StringPointerValue(user.Host) + model.Port = types.Int64PointerValue(user.Port) + model.Region = types.StringValue(region) + return nil +} + +// mapFieldsCreate maps the API response from creating a user to a resourceModel. +func mapFieldsCreate(userResp *sqlserverflexbeta.CreateUserResponse, model *resourceModel, region string) error { + if userResp == nil { + return fmt.Errorf("response is nil") + } + if model == nil { + return fmt.Errorf("model input is nil") + } + user := userResp + + if user.Id == nil { + return fmt.Errorf("user id not present") + } + userId := *user.Id + model.Id = types.Int64Value(userId) + model.UserId = types.Int64Value(userId) + model.Username = types.StringPointerValue(user.Username) + + if user.Password == nil { + return fmt.Errorf("user password not present") + } + model.Password = types.StringValue(*user.Password) + + if user.Roles != nil { + var roles []attr.Value + for _, role := range *user.Roles { + roles = append(roles, types.StringValue(string(role))) + } + rolesSet, diags := types.SetValue(types.StringType, roles) + if diags.HasError() { + return fmt.Errorf("failed to map roles: %w", core.DiagsToError(diags)) + } + model.Roles = types.List(rolesSet) + } + + if model.Roles.IsNull() || model.Roles.IsUnknown() { + model.Roles = types.List(types.SetNull(types.StringType)) + } + + model.Host = types.StringPointerValue(user.Host) + model.Port = types.Int64PointerValue(user.Port) + model.Region = types.StringValue(region) + model.Status = types.StringPointerValue(user.Status) + model.DefaultDatabase = types.StringPointerValue(user.DefaultDatabase) + + return nil +} + +// toCreatePayload converts a resourceModel to an API CreateUserRequestPayload. +func toCreatePayload( + model *resourceModel, + roles []sqlserverflexbeta.UserRole, +) (*sqlserverflexbeta.CreateUserRequestPayload, error) { + if model == nil { + return nil, fmt.Errorf("nil model") + } + + return &sqlserverflexbeta.CreateUserRequestPayload{ + Username: conversion.StringValueToPointer(model.Username), + DefaultDatabase: conversion.StringValueToPointer(model.DefaultDatabase), + Roles: &roles, + }, nil +} diff --git a/stackit/internal/services/sqlserverflexbeta/user/mapper_test.go b/stackit/internal/services/sqlserverflexbeta/user/mapper_test.go new file mode 100644 index 00000000..23c6121c --- /dev/null +++ b/stackit/internal/services/sqlserverflexbeta/user/mapper_test.go @@ -0,0 +1,527 @@ +package sqlserverflexbeta + +import ( + "testing" + + "github.com/google/go-cmp/cmp" + "github.com/hashicorp/terraform-plugin-framework/attr" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/stackitcloud/stackit-sdk-go/core/utils" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta" +) + +func TestMapDataSourceFields(t *testing.T) { + const testRegion = "region" + tests := []struct { + description string + input *sqlserverflexbeta.GetUserResponse + region string + expected dataSourceModel + isValid bool + }{ + { + "default_values", + &sqlserverflexbeta.GetUserResponse{}, + testRegion, + dataSourceModel{ + Id: types.StringValue("pid,region,iid,1"), + UserId: types.Int64Value(1), + InstanceId: types.StringValue("iid"), + ProjectId: types.StringValue("pid"), + Username: types.StringNull(), + Roles: types.List(types.SetNull(types.StringType)), + Host: types.StringNull(), + Port: types.Int64Null(), + Region: types.StringValue(testRegion), + Status: types.StringNull(), + DefaultDatabase: types.StringNull(), + }, + true, + }, + { + "simple_values", + &sqlserverflexbeta.GetUserResponse{ + + Roles: &[]sqlserverflexbeta.UserRole{ + "role_1", + "role_2", + "", + }, + Username: utils.Ptr("username"), + Host: utils.Ptr("host"), + Port: utils.Ptr(int64(1234)), + Status: utils.Ptr("active"), + DefaultDatabase: utils.Ptr("default_db"), + }, + testRegion, + dataSourceModel{ + Id: types.StringValue("pid,region,iid,1"), + UserId: types.Int64Value(1), + InstanceId: types.StringValue("iid"), + ProjectId: types.StringValue("pid"), + Username: types.StringValue("username"), + Roles: types.List( + types.SetValueMust( + types.StringType, []attr.Value{ + types.StringValue("role_1"), + types.StringValue("role_2"), + types.StringValue(""), + }, + ), + ), + Host: types.StringValue("host"), + Port: types.Int64Value(1234), + Region: types.StringValue(testRegion), + Status: types.StringValue("active"), + DefaultDatabase: types.StringValue("default_db"), + }, + true, + }, + { + "null_fields_and_int_conversions", + &sqlserverflexbeta.GetUserResponse{ + Id: utils.Ptr(int64(1)), + Roles: &[]sqlserverflexbeta.UserRole{}, + Username: nil, + Host: nil, + Port: utils.Ptr(int64(2123456789)), + }, + testRegion, + dataSourceModel{ + Id: types.StringValue("pid,region,iid,1"), + UserId: types.Int64Value(1), + InstanceId: types.StringValue("iid"), + ProjectId: types.StringValue("pid"), + Username: types.StringNull(), + Roles: types.List(types.SetValueMust(types.StringType, []attr.Value{})), + Host: types.StringNull(), + Port: types.Int64Value(2123456789), + Region: types.StringValue(testRegion), + }, + true, + }, + { + "nil_response", + nil, + testRegion, + dataSourceModel{}, + false, + }, + { + "nil_response_2", + &sqlserverflexbeta.GetUserResponse{}, + testRegion, + dataSourceModel{}, + false, + }, + { + "no_resource_id", + &sqlserverflexbeta.GetUserResponse{}, + testRegion, + dataSourceModel{}, + false, + }, + } + for _, tt := range tests { + t.Run( + tt.description, func(t *testing.T) { + state := &dataSourceModel{ + ProjectId: tt.expected.ProjectId, + InstanceId: tt.expected.InstanceId, + UserId: tt.expected.UserId, + } + err := mapDataSourceFields(tt.input, state, tt.region) + if !tt.isValid && err == nil { + t.Fatalf("Should have failed") + } + if tt.isValid && err != nil { + t.Fatalf("Should not have failed: %v", err) + } + if tt.isValid { + diff := cmp.Diff(state, &tt.expected) + if diff != "" { + t.Fatalf("Data does not match: %s", diff) + } + } + }, + ) + } +} + +func TestMapFieldsCreate(t *testing.T) { + const testRegion = "region" + tests := []struct { + description string + input *sqlserverflexbeta.CreateUserResponse + region string + expected resourceModel + isValid bool + }{ + { + "default_values", + &sqlserverflexbeta.CreateUserResponse{ + Id: utils.Ptr(int64(1)), + Password: utils.Ptr(""), + }, + testRegion, + resourceModel{ + Id: types.Int64Value(1), + UserId: types.Int64Value(1), + InstanceId: types.StringValue("iid"), + ProjectId: types.StringValue("pid"), + Username: types.StringNull(), + Roles: types.List(types.SetNull(types.StringType)), + Password: types.StringValue(""), + Host: types.StringNull(), + Port: types.Int64Null(), + Region: types.StringValue(testRegion), + }, + true, + }, + { + "simple_values", + &sqlserverflexbeta.CreateUserResponse{ + Id: utils.Ptr(int64(2)), + Roles: &[]sqlserverflexbeta.UserRole{ + "role_1", + "role_2", + "", + }, + Username: utils.Ptr("username"), + Password: utils.Ptr("password"), + Host: utils.Ptr("host"), + Port: utils.Ptr(int64(1234)), + Status: utils.Ptr("status"), + DefaultDatabase: utils.Ptr("default_db"), + }, + testRegion, + resourceModel{ + Id: types.Int64Value(2), + UserId: types.Int64Value(2), + InstanceId: types.StringValue("iid"), + ProjectId: types.StringValue("pid"), + Username: types.StringValue("username"), + Roles: types.List( + types.SetValueMust( + types.StringType, []attr.Value{ + types.StringValue("role_1"), + types.StringValue("role_2"), + types.StringValue(""), + }, + ), + ), + Password: types.StringValue("password"), + Host: types.StringValue("host"), + Port: types.Int64Value(1234), + Region: types.StringValue(testRegion), + Status: types.StringValue("status"), + DefaultDatabase: types.StringValue("default_db"), + }, + true, + }, + { + "null_fields_and_int_conversions", + &sqlserverflexbeta.CreateUserResponse{ + Id: utils.Ptr(int64(3)), + Roles: &[]sqlserverflexbeta.UserRole{}, + Username: nil, + Password: utils.Ptr(""), + Host: nil, + Port: utils.Ptr(int64(2123456789)), + }, + testRegion, + resourceModel{ + Id: types.Int64Value(3), + UserId: types.Int64Value(3), + InstanceId: types.StringValue("iid"), + ProjectId: types.StringValue("pid"), + Username: types.StringNull(), + Roles: types.List(types.SetValueMust(types.StringType, []attr.Value{})), + Password: types.StringValue(""), + Host: types.StringNull(), + Port: types.Int64Value(2123456789), + Region: types.StringValue(testRegion), + DefaultDatabase: types.StringNull(), + Status: types.StringNull(), + }, + true, + }, + { + "nil_response", + nil, + testRegion, + resourceModel{}, + false, + }, + { + "nil_response_2", + &sqlserverflexbeta.CreateUserResponse{}, + testRegion, + resourceModel{}, + false, + }, + { + "no_resource_id", + &sqlserverflexbeta.CreateUserResponse{}, + testRegion, + resourceModel{}, + false, + }, + { + "no_password", + &sqlserverflexbeta.CreateUserResponse{ + Id: utils.Ptr(int64(1)), + }, + testRegion, + resourceModel{}, + false, + }, + } + for _, tt := range tests { + t.Run( + tt.description, func(t *testing.T) { + state := &resourceModel{ + ProjectId: tt.expected.ProjectId, + InstanceId: tt.expected.InstanceId, + } + err := mapFieldsCreate(tt.input, state, tt.region) + if !tt.isValid && err == nil { + t.Fatalf("Should have failed") + } + if tt.isValid && err != nil { + t.Fatalf("Should not have failed: %v", err) + } + if tt.isValid { + diff := cmp.Diff(state, &tt.expected) + if diff != "" { + t.Fatalf("Data does not match: %s", diff) + } + } + }, + ) + } +} + +func TestMapFields(t *testing.T) { + const testRegion = "region" + tests := []struct { + description string + input *sqlserverflexbeta.GetUserResponse + region string + expected resourceModel + isValid bool + }{ + { + "default_values", + &sqlserverflexbeta.GetUserResponse{}, + testRegion, + resourceModel{ + Id: types.Int64Value(1), + UserId: types.Int64Value(1), + InstanceId: types.StringValue("iid"), + ProjectId: types.StringValue("pid"), + Username: types.StringNull(), + Roles: types.List(types.SetNull(types.StringType)), + Host: types.StringNull(), + Port: types.Int64Null(), + Region: types.StringValue(testRegion), + }, + true, + }, + { + "simple_values", + &sqlserverflexbeta.GetUserResponse{ + Roles: &[]sqlserverflexbeta.UserRole{ + "role_1", + "role_2", + "", + }, + Username: utils.Ptr("username"), + Host: utils.Ptr("host"), + Port: utils.Ptr(int64(1234)), + }, + testRegion, + resourceModel{ + Id: types.Int64Value(2), + UserId: types.Int64Value(2), + InstanceId: types.StringValue("iid"), + ProjectId: types.StringValue("pid"), + Username: types.StringValue("username"), + Roles: types.List( + types.SetValueMust( + types.StringType, []attr.Value{ + types.StringValue("role_1"), + types.StringValue("role_2"), + types.StringValue(""), + }, + ), + ), + Host: types.StringValue("host"), + Port: types.Int64Value(1234), + Region: types.StringValue(testRegion), + }, + true, + }, + { + "null_fields_and_int_conversions", + &sqlserverflexbeta.GetUserResponse{ + Id: utils.Ptr(int64(1)), + Roles: &[]sqlserverflexbeta.UserRole{}, + Username: nil, + Host: nil, + Port: utils.Ptr(int64(2123456789)), + }, + testRegion, + resourceModel{ + Id: types.Int64Value(1), + UserId: types.Int64Value(1), + InstanceId: types.StringValue("iid"), + ProjectId: types.StringValue("pid"), + Username: types.StringNull(), + Roles: types.List(types.SetValueMust(types.StringType, []attr.Value{})), + Host: types.StringNull(), + Port: types.Int64Value(2123456789), + Region: types.StringValue(testRegion), + }, + true, + }, + { + "nil_response", + nil, + testRegion, + resourceModel{}, + false, + }, + { + "nil_response_2", + &sqlserverflexbeta.GetUserResponse{}, + testRegion, + resourceModel{}, + false, + }, + { + "no_resource_id", + &sqlserverflexbeta.GetUserResponse{}, + testRegion, + resourceModel{}, + false, + }, + } + for _, tt := range tests { + t.Run( + tt.description, func(t *testing.T) { + state := &resourceModel{ + ProjectId: tt.expected.ProjectId, + InstanceId: tt.expected.InstanceId, + UserId: tt.expected.UserId, + } + err := mapFields(tt.input, state, tt.region) + if !tt.isValid && err == nil { + t.Fatalf("Should have failed") + } + if tt.isValid && err != nil { + t.Fatalf("Should not have failed: %v", err) + } + if tt.isValid { + diff := cmp.Diff(state, &tt.expected) + if diff != "" { + t.Fatalf("Data does not match: %s", diff) + } + } + }, + ) + } +} + +func TestToCreatePayload(t *testing.T) { + tests := []struct { + description string + input *resourceModel + inputRoles []sqlserverflexbeta.UserRole + expected *sqlserverflexbeta.CreateUserRequestPayload + isValid bool + }{ + { + "default_values", + &resourceModel{}, + []sqlserverflexbeta.UserRole{}, + &sqlserverflexbeta.CreateUserRequestPayload{ + Roles: &[]sqlserverflexbeta.UserRole{}, + Username: nil, + }, + true, + }, + { + "default_values", + &resourceModel{ + Username: types.StringValue("username"), + }, + []sqlserverflexbeta.UserRole{ + "role_1", + "role_2", + }, + &sqlserverflexbeta.CreateUserRequestPayload{ + Roles: &[]sqlserverflexbeta.UserRole{ + "role_1", + "role_2", + }, + Username: utils.Ptr("username"), + }, + true, + }, + { + "null_fields_and_int_conversions", + &resourceModel{ + Username: types.StringNull(), + }, + []sqlserverflexbeta.UserRole{ + "", + }, + &sqlserverflexbeta.CreateUserRequestPayload{ + Roles: &[]sqlserverflexbeta.UserRole{ + "", + }, + Username: nil, + }, + true, + }, + { + "nil_model", + nil, + []sqlserverflexbeta.UserRole{}, + nil, + false, + }, + { + "nil_roles", + &resourceModel{ + Username: types.StringValue("username"), + }, + []sqlserverflexbeta.UserRole{}, + &sqlserverflexbeta.CreateUserRequestPayload{ + Roles: &[]sqlserverflexbeta.UserRole{}, + Username: utils.Ptr("username"), + }, + true, + }, + } + for _, tt := range tests { + t.Run( + tt.description, func(t *testing.T) { + output, err := toCreatePayload(tt.input, tt.inputRoles) + if !tt.isValid && err == nil { + t.Fatalf("Should have failed") + } + if tt.isValid && err != nil { + t.Fatalf("Should not have failed: %v", err) + } + if tt.isValid { + diff := cmp.Diff(output, tt.expected) + if diff != "" { + t.Fatalf("Data does not match: %s", diff) + } + } + }, + ) + } +} diff --git a/stackit/internal/services/sqlserverflexbeta/user/resource.go b/stackit/internal/services/sqlserverflexbeta/user/resource.go index 00920927..03981b93 100644 --- a/stackit/internal/services/sqlserverflexbeta/user/resource.go +++ b/stackit/internal/services/sqlserverflexbeta/user/resource.go @@ -3,7 +3,9 @@ package sqlserverflexbeta import ( "context" _ "embed" + "errors" "fmt" + "net/http" "strconv" "strings" @@ -12,9 +14,12 @@ import ( "github.com/hashicorp/terraform-plugin-framework/resource/identityschema" "github.com/hashicorp/terraform-plugin-framework/types" "github.com/hashicorp/terraform-plugin-log/tflog" - "github.com/stackitcloud/stackit-sdk-go/core/config" + "github.com/stackitcloud/stackit-sdk-go/core/oapierror" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" + sqlserverflexbetagen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/user/resources_gen" + sqlserverflexbetaUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/utils" + sqlserverflexbetaWait "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/wait/sqlserverflexbeta" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" @@ -54,319 +59,20 @@ func (r *userResource) Metadata(ctx context.Context, req resource.MetadataReques resp.TypeName = req.ProviderTypeName + "_sqlserverflexbeta_user" } -//go:embed planModifiers.yaml -var modifiersFileByte []byte - -func (r *userResource) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) { - - s := sqlserverflexbetaResGen.UserResourceSchema(ctx) - - fields, err := utils.ReadModifiersConfig(modifiersFileByte) - if err != nil { - resp.Diagnostics.AddError("error during read modifiers config file", err.Error()) - return - } - - err = utils.AddPlanModifiersToResourceSchema(fields, &s) - if err != nil { - resp.Diagnostics.AddError("error adding plan modifiers", err.Error()) - return - } - resp.Schema = s -} - -func (r *userResource) IdentitySchema( - _ context.Context, - _ resource.IdentitySchemaRequest, - resp *resource.IdentitySchemaResponse, -) { - resp.IdentitySchema = identityschema.Schema{ - Attributes: map[string]identityschema.Attribute{ - "project_id": identityschema.StringAttribute{ - RequiredForImport: true, // must be set during import by the practitioner - }, - "region": identityschema.StringAttribute{ - RequiredForImport: true, // can be defaulted by the provider configuration - }, - "instance_id": identityschema.StringAttribute{ - RequiredForImport: true, // can be defaulted by the provider configuration - }, - }, - } -} - // Configure adds the provider configured client to the resource. -func (r *userResource) Configure( - ctx context.Context, - req resource.ConfigureRequest, - resp *resource.ConfigureResponse, -) { +func (r *userResource) Configure(ctx context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) { var ok bool r.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics) if !ok { return } - apiClientConfigOptions := []config.ConfigurationOption{ - config.WithCustomAuth(r.providerData.RoundTripper), - utils.UserAgentConfigOption(r.providerData.Version), - } - if r.providerData.SQLServerFlexCustomEndpoint != "" { - apiClientConfigOptions = append( - apiClientConfigOptions, - config.WithEndpoint(r.providerData.SQLServerFlexCustomEndpoint), - ) - } else { - apiClientConfigOptions = append(apiClientConfigOptions, config.WithRegion(r.providerData.GetRegion())) - } - apiClient, err := sqlserverflexbeta.NewAPIClient(apiClientConfigOptions...) - if err != nil { - resp.Diagnostics.AddError( - "Error configuring API client", - fmt.Sprintf( - "Configuring client: %v. This is an error related to the provider configuration, not to the resource configuration", - err, - ), - ) + apiClient := sqlserverflexbetaUtils.ConfigureClient(ctx, &r.providerData, &resp.Diagnostics) + if resp.Diagnostics.HasError() { return } r.client = apiClient - tflog.Info(ctx, "sqlserverflexbeta.User client configured") -} - -func (r *userResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) { - var data resourceModel - - // Read Terraform plan data into the model - resp.Diagnostics.Append(req.Plan.Get(ctx, &data)...) - - if resp.Diagnostics.HasError() { - return - } - - // Read identity data - var identityData UserResourceIdentityModel - resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...) - if resp.Diagnostics.HasError() { - return - } - - ctx = core.InitProviderContext(ctx) - - projectId := identityData.ProjectID.ValueString() - region := identityData.Region.ValueString() - ctx = tflog.SetField(ctx, "project_id", projectId) - ctx = tflog.SetField(ctx, "region", region) - - //payload, err := toCreatePayload(ctx, &data) - //if err != nil { - // core.LogAndAddError( - // ctx, - // &resp.Diagnostics, - // "Error creating User", - // fmt.Sprintf("Creating API payload: %v", err), - // ) - // return - //} - //payload = payload - - // TODO: Create API call logic - /* - // Generate API request body from model - payload, err := toCreatePayload(ctx, &model) - if err != nil { - core.LogAndAddError( - ctx, - &resp.Diagnostics, - "Error creating User", - fmt.Sprintf("Creating API payload: %v", err), - ) - return - } - // Create new User - createResp, err := r.client.CreateUserRequest( - ctx, - projectId, - region, - ).CreateUserRequestPayload(*payload).Execute() - if err != nil { - core.LogAndAddError(ctx, &resp.Diagnostics, "Error creating User", fmt.Sprintf("Calling API: %v", err)) - return - } - - ctx = core.LogResponse(ctx) - - UserId := *createResp.Id - */ - - // Example data value setting - //data.UserId = types.StringValue("id-from-response") - - // TODO: Set data returned by API in identity - identity := UserResourceIdentityModel{ - ProjectID: types.StringValue(projectId), - Region: types.StringValue(region), - // TODO: add missing values - // UserID: types.StringValue(UserId), - } - resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...) - if resp.Diagnostics.HasError() { - return - } - - // TODO: implement wait handler if needed - /* - - waitResp, err := wait.CreateUserWaitHandler( - ctx, - r.client, - projectId, - UserId, - region, - ).SetSleepBeforeWait( - 30 * time.Second, - ).SetTimeout( - 90 * time.Minute, - ).WaitWithContext(ctx) - if err != nil { - core.LogAndAddError( - ctx, - &resp.Diagnostics, - "Error creating User", - fmt.Sprintf("User creation waiting: %v", err), - ) - return - } - - if waitResp.Id == nil { - core.LogAndAddError( - ctx, - &resp.Diagnostics, - "Error creating User", - "User creation waiting: returned id is nil", - ) - return - } - - // Map response body to schema - err = mapResponseToModel(ctx, waitResp, &model, resp.Diagnostics) - if err != nil { - core.LogAndAddError( - ctx, - &resp.Diagnostics, - "Error creating User", - fmt.Sprintf("Processing API payload: %v", err), - ) - return - } - - */ - - // Save data into Terraform state - resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) - - tflog.Info(ctx, "sqlserverflexbeta.User created") -} - -func (r *userResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) { - var data resourceModel - - // Read Terraform prior state data into the model - resp.Diagnostics.Append(req.State.Get(ctx, &data)...) - if resp.Diagnostics.HasError() { - return - } - - // Read identity data - var identityData UserResourceIdentityModel - resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...) - if resp.Diagnostics.HasError() { - return - } - - ctx = core.InitProviderContext(ctx) - - projectId := identityData.ProjectID.ValueString() - region := identityData.Region.ValueString() - ctx = tflog.SetField(ctx, "project_id", projectId) - ctx = tflog.SetField(ctx, "region", region) - - // Todo: Read API call logic - - // Save updated data into Terraform state - resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) - - // TODO: Set data returned by API in identity - identity := UserResourceIdentityModel{ - ProjectID: types.StringValue(projectId), - Region: types.StringValue(region), - // InstanceID: types.StringValue(instanceId), - } - resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...) - if resp.Diagnostics.HasError() { - return - } - - tflog.Info(ctx, "sqlserverflexbeta.User read") -} - -func (r *userResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) { - var data resourceModel - - // Read Terraform prior state data into the model - resp.Diagnostics.Append(req.State.Get(ctx, &data)...) - if resp.Diagnostics.HasError() { - return - } - - // Read identity data - var identityData UserResourceIdentityModel - resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...) - if resp.Diagnostics.HasError() { - return - } - - ctx = core.InitProviderContext(ctx) - - projectId := identityData.ProjectID.ValueString() - region := identityData.Region.ValueString() - ctx = tflog.SetField(ctx, "project_id", projectId) - ctx = tflog.SetField(ctx, "region", region) - - // Todo: Update API call logic - - // Save updated data into Terraform state - resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) - - tflog.Info(ctx, "sqlserverflexbeta.User updated") -} - -func (r *userResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) { - var data resourceModel - - // Read Terraform prior state data into the model - resp.Diagnostics.Append(req.State.Get(ctx, &data)...) - if resp.Diagnostics.HasError() { - return - } - - // Read identity data - var identityData UserResourceIdentityModel - resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...) - if resp.Diagnostics.HasError() { - return - } - - ctx = core.InitProviderContext(ctx) - - projectId := identityData.ProjectID.ValueString() - region := identityData.Region.ValueString() - ctx = tflog.SetField(ctx, "project_id", projectId) - ctx = tflog.SetField(ctx, "region", region) - - // Todo: Delete API call logic - - tflog.Info(ctx, "sqlserverflexbeta.User deleted") + tflog.Info(ctx, "SQLServer Beta Flex user client configured") } // ModifyPlan implements resource.ResourceWithModifyPlan. @@ -397,25 +103,256 @@ func (r *userResource) ModifyPlan( return } - var identityModel UserResourceIdentityModel - identityModel.ProjectID = planModel.ProjectId - identityModel.Region = planModel.Region - // TODO: complete - //if !planModel.InstanceId.IsNull() && !planModel.InstanceId.IsUnknown() { - // identityModel.InstanceID = planModel.InstanceId - //} - - resp.Diagnostics.Append(resp.Identity.Set(ctx, identityModel)...) - if resp.Diagnostics.HasError() { - return - } - resp.Diagnostics.Append(resp.Plan.Set(ctx, planModel)...) if resp.Diagnostics.HasError() { return } } +//go:embed planModifiers.yaml +var modifiersFileByte []byte + +// Schema defines the schema for the resource. +func (r *userResource) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) { + s := sqlserverflexbetagen.UserResourceSchema(ctx) + + fields, err := utils.ReadModifiersConfig(modifiersFileByte) + if err != nil { + resp.Diagnostics.AddError("error during read modifiers config file", err.Error()) + return + } + + err = utils.AddPlanModifiersToResourceSchema(fields, &s) + if err != nil { + resp.Diagnostics.AddError("error adding plan modifiers", err.Error()) + return + } + resp.Schema = s +} + +// IdentitySchema defines the schema for the resource's identity attributes. +func (r *userResource) IdentitySchema( + _ context.Context, + _ resource.IdentitySchemaRequest, + response *resource.IdentitySchemaResponse, +) { + response.IdentitySchema = identityschema.Schema{ + Attributes: map[string]identityschema.Attribute{ + "project_id": identityschema.StringAttribute{ + RequiredForImport: true, // must be set during import by the practitioner + }, + "region": identityschema.StringAttribute{ + RequiredForImport: true, // can be defaulted by the provider configuration + }, + "instance_id": identityschema.StringAttribute{ + RequiredForImport: true, // can be defaulted by the provider configuration + }, + "user_id": identityschema.Int64Attribute{ + RequiredForImport: true, // can be defaulted by the provider configuration + }, + }, + } +} + +// Create creates the resource and sets the initial Terraform state. +func (r *userResource) Create( + ctx context.Context, + req resource.CreateRequest, + resp *resource.CreateResponse, +) { // nolint:gocritic // function signature required by Terraform + var model resourceModel + diags := req.Plan.Get(ctx, &model) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + + ctx = core.InitProviderContext(ctx) + + projectId := model.ProjectId.ValueString() + instanceId := model.InstanceId.ValueString() + region := model.Region.ValueString() + + ctx = tflog.SetField(ctx, "project_id", projectId) + ctx = tflog.SetField(ctx, "instance_id", instanceId) + ctx = tflog.SetField(ctx, "region", region) + + var roles []sqlserverflexbeta.UserRole + if !model.Roles.IsNull() && !model.Roles.IsUnknown() { + diags = model.Roles.ElementsAs(ctx, &roles, false) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + } + + // Generate API request body from model + payload, err := toCreatePayload(&model, roles) + if err != nil { + core.LogAndAddError(ctx, &resp.Diagnostics, "Error creating user", fmt.Sprintf("Creating API payload: %v", err)) + return + } + // Create new user + userResp, err := r.client.CreateUserRequest( + ctx, + projectId, + region, + instanceId, + ).CreateUserRequestPayload(*payload).Execute() + if err != nil { + core.LogAndAddError(ctx, &resp.Diagnostics, "Error creating user", fmt.Sprintf("Calling API: %v", err)) + return + } + + ctx = core.LogResponse(ctx) + + if userResp == nil || userResp.Id == nil || *userResp.Id == 0 { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + "Error creating user", + "API didn't return user Id. A user might have been created", + ) + return + } + userId := *userResp.Id + ctx = tflog.SetField(ctx, "user_id", userId) + + // Map response body to schema + err = mapFieldsCreate(userResp, &model, region) + if err != nil { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + "Error creating user", + fmt.Sprintf("Processing API payload: %v", err), + ) + return + } + // Set state to fully populated data + diags = resp.State.Set(ctx, model) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + tflog.Info(ctx, "SQLServer Flex user created") +} + +// Read refreshes the Terraform state with the latest data. +func (r *userResource) Read( + ctx context.Context, + req resource.ReadRequest, + resp *resource.ReadResponse, +) { // nolint:gocritic // function signature required by Terraform + var model resourceModel + diags := req.State.Get(ctx, &model) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + + ctx = core.InitProviderContext(ctx) + + projectId := model.ProjectId.ValueString() + instanceId := model.InstanceId.ValueString() + userId := model.UserId.ValueInt64() + region := r.providerData.GetRegionWithOverride(model.Region) + ctx = tflog.SetField(ctx, "project_id", projectId) + ctx = tflog.SetField(ctx, "instance_id", instanceId) + ctx = tflog.SetField(ctx, "user_id", userId) + ctx = tflog.SetField(ctx, "region", region) + + recordSetResp, err := r.client.GetUserRequest(ctx, projectId, region, instanceId, userId).Execute() + if err != nil { + var oapiErr *oapierror.GenericOpenAPIError + ok := errors.As( + err, + &oapiErr, + ) + //nolint:errorlint //complaining that error.As should be used to catch wrapped errors, but this error should not be wrapped + if ok && oapiErr.StatusCode == http.StatusNotFound { + resp.State.RemoveResource(ctx) + return + } + core.LogAndAddError(ctx, &resp.Diagnostics, "Error reading user", fmt.Sprintf("Calling API: %v", err)) + return + } + + ctx = core.LogResponse(ctx) + + // Map response body to schema + err = mapFields(recordSetResp, &model, region) + if err != nil { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + "Error reading user", + fmt.Sprintf("Processing API payload: %v", err), + ) + return + } + + // Set refreshed state + diags = resp.State.Set(ctx, model) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + tflog.Info(ctx, "SQLServer Flex user read") +} + +// Update updates the resource and sets the updated Terraform state on success. +func (r *userResource) Update( + ctx context.Context, + _ resource.UpdateRequest, + resp *resource.UpdateResponse, +) { // nolint:gocritic // function signature required by Terraform + // Update shouldn't be called + core.LogAndAddError(ctx, &resp.Diagnostics, "Error updating user", "User can't be updated") +} + +// Delete deletes the resource and removes the Terraform state on success. +func (r *userResource) Delete( + ctx context.Context, + req resource.DeleteRequest, + resp *resource.DeleteResponse, +) { // nolint:gocritic // function signature required by Terraform + // Retrieve values from plan + var model resourceModel + diags := req.State.Get(ctx, &model) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + + ctx = core.InitProviderContext(ctx) + + projectId := model.ProjectId.ValueString() + instanceId := model.InstanceId.ValueString() + userId := model.UserId.ValueInt64() + region := model.Region.ValueString() + ctx = tflog.SetField(ctx, "project_id", projectId) + ctx = tflog.SetField(ctx, "instance_id", instanceId) + ctx = tflog.SetField(ctx, "user_id", userId) + ctx = tflog.SetField(ctx, "region", region) + + // Delete existing record set + // err := r.client.DeleteUserRequest(ctx, projectId, region, instanceId, userId).Execute() + + // Delete existing record set + _, err := sqlserverflexbetaWait.DeleteUserWaitHandler(ctx, r.client, projectId, region, instanceId, userId). + WaitWithContext(ctx) + //err := r.client.DeleteUserRequest(ctx, arg.projectId, arg.region, arg.instanceId, userId).Execute() + if err != nil { + core.LogAndAddError(ctx, &resp.Diagnostics, "User Delete Error", fmt.Sprintf("Calling API: %v", err)) + return + } + + ctx = core.LogResponse(ctx) + + tflog.Info(ctx, "SQLServer Flex user deleted") +} + // ImportState imports a resource into the Terraform state on success. // The expected format of the resource import identifier is: project_id,zone_id,record_set_id func (r *userResource) ImportState( @@ -423,6 +360,7 @@ func (r *userResource) ImportState( req resource.ImportStateRequest, resp *resource.ImportStateResponse, ) { + ctx = core.InitProviderContext(ctx) if req.ID != "" { @@ -457,7 +395,7 @@ func (r *userResource) ImportState( resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), idParts[2])...) resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("user_id"), userId)...) - tflog.Info(ctx, "Sqlserverflexbeta user state imported") + tflog.Info(ctx, "Postgres Flex user state imported") return } @@ -482,9 +420,8 @@ func (r *userResource) ImportState( core.LogAndAddWarning( ctx, &resp.Diagnostics, - "Sqlserverflexbeta database imported with empty password", - "The database password is not imported as it is only available upon creation of a new database. The password field will be empty.", + "SQLServer Flex user imported with empty password", + "The user password is not imported as it is only available upon creation of a new user. The password field will be empty.", ) - tflog.Info(ctx, "Sqlserverflexbeta user state imported") - + tflog.Info(ctx, "SQLServer Flex user state imported") } diff --git a/stackit/internal/services/sqlserverflexbeta/utils/util.go b/stackit/internal/services/sqlserverflexbeta/utils/util.go new file mode 100644 index 00000000..df638d8c --- /dev/null +++ b/stackit/internal/services/sqlserverflexbeta/utils/util.go @@ -0,0 +1,47 @@ +package utils + +import ( + "context" + "fmt" + + sqlserverflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta" + + "github.com/hashicorp/terraform-plugin-framework/diag" + "github.com/stackitcloud/stackit-sdk-go/core/config" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" +) + +func ConfigureClient( + ctx context.Context, + providerData *core.ProviderData, + diags *diag.Diagnostics, +) *sqlserverflex.APIClient { + apiClientConfigOptions := []config.ConfigurationOption{ + config.WithCustomAuth(providerData.RoundTripper), + utils.UserAgentConfigOption(providerData.Version), + } + if providerData.SQLServerFlexCustomEndpoint != "" { + apiClientConfigOptions = append( + apiClientConfigOptions, + config.WithEndpoint(providerData.SQLServerFlexCustomEndpoint), + ) + } else { + apiClientConfigOptions = append(apiClientConfigOptions, config.WithRegion(providerData.GetRegion())) + } + apiClient, err := sqlserverflex.NewAPIClient(apiClientConfigOptions...) + if err != nil { + core.LogAndAddError( + ctx, + diags, + "Error configuring API client", + fmt.Sprintf( + "Configuring client: %v. This is an error related to the provider configuration, not to the resource configuration", + err, + ), + ) + return nil + } + + return apiClient +} diff --git a/stackit/internal/services/sqlserverflexbeta/utils/util_test.go b/stackit/internal/services/sqlserverflexbeta/utils/util_test.go new file mode 100644 index 00000000..08ac1974 --- /dev/null +++ b/stackit/internal/services/sqlserverflexbeta/utils/util_test.go @@ -0,0 +1,97 @@ +package utils + +import ( + "context" + "os" + "reflect" + "testing" + + "github.com/hashicorp/terraform-plugin-framework/diag" + sdkClients "github.com/stackitcloud/stackit-sdk-go/core/clients" + "github.com/stackitcloud/stackit-sdk-go/core/config" + sqlserverflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta" + + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" +) + +const ( + testVersion = "1.2.3" + testCustomEndpoint = "https://sqlserverflex-custom-endpoint.api.stackit.cloud" +) + +func TestConfigureClient(t *testing.T) { + /* mock authentication by setting service account token env variable */ + os.Clearenv() + err := os.Setenv(sdkClients.ServiceAccountToken, "mock-val") + if err != nil { + t.Errorf("error setting env variable: %v", err) + } + + type args struct { + providerData *core.ProviderData + } + tests := []struct { + name string + args args + wantErr bool + expected *sqlserverflex.APIClient + }{ + { + name: "default endpoint", + args: args{ + providerData: &core.ProviderData{ + Version: testVersion, + }, + }, + expected: func() *sqlserverflex.APIClient { + apiClient, err := sqlserverflex.NewAPIClient( + config.WithRegion("eu01"), + utils.UserAgentConfigOption(testVersion), + ) + if err != nil { + t.Errorf("error configuring client: %v", err) + } + return apiClient + }(), + wantErr: false, + }, + { + name: "custom endpoint", + args: args{ + providerData: &core.ProviderData{ + Version: testVersion, + SQLServerFlexCustomEndpoint: testCustomEndpoint, + }, + }, + expected: func() *sqlserverflex.APIClient { + apiClient, err := sqlserverflex.NewAPIClient( + utils.UserAgentConfigOption(testVersion), + config.WithEndpoint(testCustomEndpoint), + ) + if err != nil { + t.Errorf("error configuring client: %v", err) + } + return apiClient + }(), + wantErr: false, + }, + } + for _, tt := range tests { + t.Run( + tt.name, func(t *testing.T) { + ctx := context.Background() + diags := diag.Diagnostics{} + + actual := ConfigureClient(ctx, tt.args.providerData, &diags) + if diags.HasError() != tt.wantErr { + t.Errorf("ConfigureClient() error = %v, want %v", diags.HasError(), tt.wantErr) + } + + if !reflect.DeepEqual(actual, tt.expected) { + t.Errorf("ConfigureClient() = %v, want %v", actual, tt.expected) + } + }, + ) + } +} diff --git a/stackit/internal/wait/sqlserverflexbeta/wait.go b/stackit/internal/wait/sqlserverflexbeta/wait.go index 36da00b1..3bef0c64 100644 --- a/stackit/internal/wait/sqlserverflexbeta/wait.go +++ b/stackit/internal/wait/sqlserverflexbeta/wait.go @@ -27,9 +27,35 @@ const ( // APIClientInterface Interface needed for tests type APIClientInterface interface { - GetInstanceRequestExecute(ctx context.Context, projectId, region, instanceId string) (*sqlserverflex.GetInstanceResponse, error) - GetDatabaseRequestExecute(ctx context.Context, projectId string, region string, instanceId string, databaseName string) (*sqlserverflex.GetDatabaseResponse, error) - GetUserRequestExecute(ctx context.Context, projectId string, region string, instanceId string, userId int64) (*sqlserverflex.GetUserResponse, error) + GetInstanceRequestExecute( + ctx context.Context, + projectId, region, instanceId string, + ) (*sqlserverflex.GetInstanceResponse, error) + GetDatabaseRequestExecute( + ctx context.Context, + projectId string, + region string, + instanceId string, + databaseName string, + ) (*sqlserverflex.GetDatabaseResponse, error) + GetUserRequestExecute( + ctx context.Context, + projectId string, + region string, + instanceId string, + userId int64, + ) (*sqlserverflex.GetUserResponse, error) +} + +// APIClientUserInterface Interface needed for tests +type APIClientUserInterface interface { + DeleteUserRequestExecute( + ctx context.Context, + projectId string, + region string, + instanceId string, + userId int64, + ) error } // CreateInstanceWaitHandler will wait for instance creation @@ -38,93 +64,115 @@ func CreateInstanceWaitHandler( a APIClientInterface, projectId, instanceId, region string, ) *wait.AsyncActionHandler[sqlserverflex.GetInstanceResponse] { - handler := wait.New(func() (waitFinished bool, response *sqlserverflex.GetInstanceResponse, err error) { - s, err := a.GetInstanceRequestExecute(ctx, projectId, region, instanceId) - if err != nil { - return false, nil, err - } - if s == nil || s.Id == nil || *s.Id != instanceId || s.Status == nil { - return false, nil, nil - } - switch strings.ToLower(string(*s.Status)) { - case strings.ToLower(InstanceStateSuccess): - if *s.Network.AccessScope == "SNA" { - if s.Network.InstanceAddress == nil { - tflog.Info(ctx, "Waiting for instance_address") - return false, nil, nil - } - if s.Network.RouterAddress == nil { - tflog.Info(ctx, "Waiting for router_address") - return false, nil, nil - } + handler := wait.New( + func() (waitFinished bool, response *sqlserverflex.GetInstanceResponse, err error) { + s, err := a.GetInstanceRequestExecute(ctx, projectId, region, instanceId) + if err != nil { + return false, nil, err } - return true, s, nil - case strings.ToLower(InstanceStateUnknown), strings.ToLower(InstanceStateFailed): - return true, s, fmt.Errorf("create failed for instance with id %s", instanceId) - case strings.ToLower(InstanceStatePending), strings.ToLower(InstanceStateProcessing): - tflog.Info(ctx, "request is being handled", map[string]interface{}{ - "status": *s.Status, - }) - return false, nil, nil - default: - tflog.Info(ctx, "Wait (create) received unknown status", map[string]interface{}{ - "instanceId": instanceId, - "status": s.Status, - }) - return false, s, nil - } - }) + if s == nil || s.Id == nil || *s.Id != instanceId || s.Status == nil { + return false, nil, nil + } + switch strings.ToLower(string(*s.Status)) { + case strings.ToLower(InstanceStateSuccess): + if *s.Network.AccessScope == "SNA" { + if s.Network.InstanceAddress == nil { + tflog.Info(ctx, "Waiting for instance_address") + return false, nil, nil + } + if s.Network.RouterAddress == nil { + tflog.Info(ctx, "Waiting for router_address") + return false, nil, nil + } + } + return true, s, nil + case strings.ToLower(InstanceStateUnknown), strings.ToLower(InstanceStateFailed): + return true, s, fmt.Errorf("create failed for instance with id %s", instanceId) + case strings.ToLower(InstanceStatePending), strings.ToLower(InstanceStateProcessing): + tflog.Info( + ctx, "request is being handled", map[string]interface{}{ + "status": *s.Status, + }, + ) + return false, nil, nil + default: + tflog.Info( + ctx, "Wait (create) received unknown status", map[string]interface{}{ + "instanceId": instanceId, + "status": s.Status, + }, + ) + return false, s, nil + } + }, + ) return handler } // UpdateInstanceWaitHandler will wait for instance update -func UpdateInstanceWaitHandler(ctx context.Context, a APIClientInterface, projectId, instanceId, region string) *wait.AsyncActionHandler[sqlserverflex.GetInstanceResponse] { - handler := wait.New(func() (waitFinished bool, response *sqlserverflex.GetInstanceResponse, err error) { - s, err := a.GetInstanceRequestExecute(ctx, projectId, region, instanceId) - if err != nil { - return false, nil, err - } - if s == nil || s.Id == nil || *s.Id != instanceId || s.Status == nil { - return false, nil, nil - } - switch strings.ToLower(string(*s.Status)) { - case strings.ToLower(InstanceStateSuccess): - return true, s, nil - case strings.ToLower(InstanceStateUnknown), strings.ToLower(InstanceStateFailed): - return true, s, fmt.Errorf("update failed for instance with id %s", instanceId) - case strings.ToLower(InstanceStatePending), strings.ToLower(InstanceStateProcessing): - tflog.Info(ctx, "request is being handled", map[string]interface{}{ - "status": *s.Status, - }) - return false, s, nil - default: - tflog.Info(ctx, "Wait (update) received unknown status", map[string]interface{}{ - "instanceId": instanceId, - "status": s.Status, - }) - return false, s, nil - } - }) +func UpdateInstanceWaitHandler( + ctx context.Context, + a APIClientInterface, + projectId, instanceId, region string, +) *wait.AsyncActionHandler[sqlserverflex.GetInstanceResponse] { + handler := wait.New( + func() (waitFinished bool, response *sqlserverflex.GetInstanceResponse, err error) { + s, err := a.GetInstanceRequestExecute(ctx, projectId, region, instanceId) + if err != nil { + return false, nil, err + } + if s == nil || s.Id == nil || *s.Id != instanceId || s.Status == nil { + return false, nil, nil + } + switch strings.ToLower(string(*s.Status)) { + case strings.ToLower(InstanceStateSuccess): + return true, s, nil + case strings.ToLower(InstanceStateUnknown), strings.ToLower(InstanceStateFailed): + return true, s, fmt.Errorf("update failed for instance with id %s", instanceId) + case strings.ToLower(InstanceStatePending), strings.ToLower(InstanceStateProcessing): + tflog.Info( + ctx, "request is being handled", map[string]interface{}{ + "status": *s.Status, + }, + ) + return false, s, nil + default: + tflog.Info( + ctx, "Wait (update) received unknown status", map[string]interface{}{ + "instanceId": instanceId, + "status": s.Status, + }, + ) + return false, s, nil + } + }, + ) return handler } // DeleteInstanceWaitHandler will wait for instance deletion -func DeleteInstanceWaitHandler(ctx context.Context, a APIClientInterface, projectId, instanceId, region string) *wait.AsyncActionHandler[sqlserverflex.GetInstanceResponse] { - handler := wait.New(func() (waitFinished bool, response *sqlserverflex.GetInstanceResponse, err error) { - s, err := a.GetInstanceRequestExecute(ctx, projectId, region, instanceId) - if err == nil { - return false, s, nil - } - var oapiErr *oapierror.GenericOpenAPIError - ok := errors.As(err, &oapiErr) - if !ok { - return false, nil, fmt.Errorf("could not convert error to oapierror.GenericOpenAPIError") - } - if oapiErr.StatusCode != http.StatusNotFound { - return false, nil, err - } - return true, nil, nil - }) +func DeleteInstanceWaitHandler( + ctx context.Context, + a APIClientInterface, + projectId, instanceId, region string, +) *wait.AsyncActionHandler[sqlserverflex.GetInstanceResponse] { + handler := wait.New( + func() (waitFinished bool, response *sqlserverflex.GetInstanceResponse, err error) { + s, err := a.GetInstanceRequestExecute(ctx, projectId, region, instanceId) + if err == nil { + return false, s, nil + } + var oapiErr *oapierror.GenericOpenAPIError + ok := errors.As(err, &oapiErr) + if !ok { + return false, nil, fmt.Errorf("could not convert error to oapierror.GenericOpenAPIError") + } + if oapiErr.StatusCode != http.StatusNotFound { + return false, nil, err + } + return true, nil, nil + }, + ) handler.SetTimeout(30 * time.Minute) return handler } @@ -135,23 +183,60 @@ func CreateDatabaseWaitHandler( a APIClientInterface, projectId, instanceId, region, databaseName string, ) *wait.AsyncActionHandler[sqlserverflex.GetDatabaseResponse] { - handler := wait.New(func() (waitFinished bool, response *sqlserverflex.GetDatabaseResponse, err error) { - s, err := a.GetDatabaseRequestExecute(ctx, projectId, region, instanceId, databaseName) - if err != nil { - return false, nil, err - } - if s == nil || s.Name == nil || *s.Name != databaseName { - return false, nil, nil - } - var oapiErr *oapierror.GenericOpenAPIError - ok := errors.As(err, &oapiErr) - if !ok { - return false, nil, fmt.Errorf("could not convert error to oapierror.GenericOpenAPIError") - } - if oapiErr.StatusCode != http.StatusNotFound { - return false, nil, err - } - return true, nil, nil - }) + handler := wait.New( + func() (waitFinished bool, response *sqlserverflex.GetDatabaseResponse, err error) { + s, err := a.GetDatabaseRequestExecute(ctx, projectId, region, instanceId, databaseName) + if err != nil { + return false, nil, err + } + if s == nil || s.Name == nil || *s.Name != databaseName { + return false, nil, nil + } + var oapiErr *oapierror.GenericOpenAPIError + ok := errors.As(err, &oapiErr) + if !ok { + return false, nil, fmt.Errorf("could not convert error to oapierror.GenericOpenAPIError") + } + if oapiErr.StatusCode != http.StatusNotFound { + return false, nil, err + } + return true, nil, nil + }, + ) + return handler +} + +// DeleteUserWaitHandler will wait for instance deletion +func DeleteUserWaitHandler( + ctx context.Context, + a APIClientUserInterface, + projectId, instanceId, region string, + userId int64, +) *wait.AsyncActionHandler[struct{}] { + handler := wait.New( + func() (waitFinished bool, response *struct{}, err error) { + err = a.DeleteUserRequestExecute(ctx, projectId, region, instanceId, userId) + if err == nil { + return false, nil, nil + } + var oapiErr *oapierror.GenericOpenAPIError + ok := errors.As(err, &oapiErr) + if !ok { + return false, nil, fmt.Errorf("could not convert error to oapierror.GenericOpenAPIError") + } + + switch oapiErr.StatusCode { + case http.StatusNotFound: + return true, nil, nil + case http.StatusInternalServerError: + tflog.Warn(ctx, "Wait handler got error 500") + return false, nil, nil + default: + return false, nil, err + } + }, + ) + handler.SetTimeout(15 * time.Minute) + handler.SetSleepBeforeWait(15 * time.Second) return handler } From f2bffa9ece57b02b8c4bccc9cf9d6f7a5dab36a8 Mon Sep 17 00:00:00 2001 From: "Marcel S. Henselin" Date: Wed, 11 Feb 2026 09:07:38 +0000 Subject: [PATCH 15/41] chore: add_protocol (#47) ## Description relates to #1234 ## Checklist - [ ] Issue was linked above - [ ] Code format was applied: `make fmt` - [ ] Examples were added / adjusted (see `examples/` directory) - [x] Docs are up-to-date: `make generate-docs` (will be checked by CI) - [ ] Unit tests got implemented or updated - [ ] Acceptance tests got implemented or updated (see e.g. [here](https://github.com/stackitcloud/terraform-provider-stackit/blob/f5f99d170996b208672ae684b6da53420e369563/stackit/internal/services/dns/dns_acc_test.go)) - [x] Unit tests are passing: `make test` (will be checked by CI) - [x] No linter issues: `make lint` (will be checked by CI) Reviewed-on: https://tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pulls/47 Reviewed-by: Andre_Harms Co-authored-by: Marcel S. Henselin Co-committed-by: Marcel S. Henselin --- .github/actions/setup-cache-go/action.yaml | 20 +- .github/workflows/ci.yaml | 11 + cmd/cmd/publish/provider.go | 2 +- .../sqlserverflex_acc_test.go | 1068 ++++++----------- 4 files changed, 391 insertions(+), 710 deletions(-) diff --git a/.github/actions/setup-cache-go/action.yaml b/.github/actions/setup-cache-go/action.yaml index 8837ca59..81f0d17d 100644 --- a/.github/actions/setup-cache-go/action.yaml +++ b/.github/actions/setup-cache-go/action.yaml @@ -1,30 +1,39 @@ -# SPDX-License-Identifier: MIT -name: 'Forgejo Actions to setup Go and cache dependencies' -author: 'Forgejo authors' +name: 'Setup Go and cache dependencies' +author: 'Forgejo authors, Marcel S. Henselin' description: | Wrap the setup-go with improved dependency caching. + inputs: username: description: 'User for which to manage the dependency cache' default: root + go-version: + description: "go version to install" + default: '1.25' + required: true + runs: using: "composite" steps: - name: "Install zstd for faster caching" + shell: bash run: | apt-get update -qq apt-get -q install -qq -y zstd - name: "Set up Go using setup-go" - uses: https://data.forgejo.org/actions/setup-go@v6 + uses: https://code.forgejo.org/actions/setup-go@v6 id: go-version with: - go-version-file: "go.mod" + go-version: ${{ inputs.go-version }} + check-latest: true # Always check for the latest patch release + # go-version-file: "go.mod" # do not cache dependencies, we do this manually cache: false - name: "Get go environment information" + shell: bash id: go-environment run: | chmod 755 $HOME # ensure ${RUN_AS_USER} has permission when go is located in $HOME @@ -36,6 +45,7 @@ runs: GO_VERSION: ${{ steps.go-version.outputs.go-version }} - name: "Create cache folders with correct permissions (for non-root users)" + shell: bash if: inputs.username != 'root' # when the cache is restored, only the permissions of the last part are restored # so assuming that /home/user exists and we are restoring /home/user/go/pkg/mod, diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index ac9771a6..693eec61 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -146,6 +146,17 @@ jobs: - name: Test run: make test + - name: Check coverage threshold + shell: bash + run: | + make coverage + COVERAGE=$(go tool cover -func=coverage.out | grep total | awk '{print $3}' | sed 's/%//') + echo "Coverage: $COVERAGE%" + if (( $(echo "$COVERAGE < 80" | bc -l) )); then + echo "Coverage is below 80%" + # exit 1 + fi + - name: Archive code coverage results uses: actions/upload-artifact@v4 with: diff --git a/cmd/cmd/publish/provider.go b/cmd/cmd/publish/provider.go index 92a77b9a..d581fbe3 100644 --- a/cmd/cmd/publish/provider.go +++ b/cmd/cmd/publish/provider.go @@ -143,7 +143,7 @@ func (p *Provider) createVersionsFile() error { // Build the versions file... version := Version{ Version: p.Version, - Protocols: []string{"5.1"}, + Protocols: []string{"5.1", "6.0"}, Platforms: nil, } for _, sum := range shasums { diff --git a/stackit/internal/services/sqlserverflexalpha/sqlserverflex_acc_test.go b/stackit/internal/services/sqlserverflexalpha/sqlserverflex_acc_test.go index 56001b87..5138deee 100644 --- a/stackit/internal/services/sqlserverflexalpha/sqlserverflex_acc_test.go +++ b/stackit/internal/services/sqlserverflexalpha/sqlserverflex_acc_test.go @@ -18,8 +18,8 @@ import ( "github.com/stackitcloud/stackit-sdk-go/core/utils" "github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex" "github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex/wait" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/internal/testutils" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" - "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/testutil" ) var ( @@ -30,35 +30,20 @@ var ( ) var testConfigVarsMin = config.Variables{ - "project_id": config.StringVariable(testutil.ProjectId), - "name": config.StringVariable( - fmt.Sprintf( - "tf-acc-%s", - acctest.RandStringFromCharSet(7, acctest.CharSetAlphaNum), - ), - ), + "project_id": config.StringVariable(testutils.ProjectId), + "name": config.StringVariable(fmt.Sprintf("tf-acc-%s", acctest.RandStringFromCharSet(7, acctest.CharSetAlphaNum))), "flavor_cpu": config.IntegerVariable(4), "flavor_ram": config.IntegerVariable(16), "flavor_description": config.StringVariable("SQLServer-Flex-4.16-Standard-EU01"), "replicas": config.IntegerVariable(1), "flavor_id": config.StringVariable("4.16-Single"), - "username": config.StringVariable( - fmt.Sprintf( - "tf-acc-user-%s", - acctest.RandStringFromCharSet(7, acctest.CharSetAlpha), - ), - ), - "role": config.StringVariable("##STACKIT_LoginManager##"), + "username": config.StringVariable(fmt.Sprintf("tf-acc-user-%s", acctest.RandStringFromCharSet(7, acctest.CharSetAlpha))), + "role": config.StringVariable("##STACKIT_LoginManager##"), } var testConfigVarsMax = config.Variables{ - "project_id": config.StringVariable(testutil.ProjectId), - "name": config.StringVariable( - fmt.Sprintf( - "tf-acc-%s", - acctest.RandStringFromCharSet(7, acctest.CharSetAlphaNum), - ), - ), + "project_id": config.StringVariable(testutils.ProjectId), + "name": config.StringVariable(fmt.Sprintf("tf-acc-%s", acctest.RandStringFromCharSet(7, acctest.CharSetAlphaNum))), "acl1": config.StringVariable("192.168.0.0/16"), "flavor_cpu": config.IntegerVariable(4), "flavor_ram": config.IntegerVariable(16), @@ -70,19 +55,14 @@ var testConfigVarsMax = config.Variables{ "options_retention_days": config.IntegerVariable(64), "flavor_id": config.StringVariable("4.16-Single"), "backup_schedule": config.StringVariable("00 6 * * *"), - "username": config.StringVariable( - fmt.Sprintf( - "tf-acc-user-%s", - acctest.RandStringFromCharSet(7, acctest.CharSetAlpha), - ), - ), - "role": config.StringVariable("##STACKIT_LoginManager##"), - "region": config.StringVariable(testutil.Region), + "username": config.StringVariable(fmt.Sprintf("tf-acc-user-%s", acctest.RandStringFromCharSet(7, acctest.CharSetAlpha))), + "role": config.StringVariable("##STACKIT_LoginManager##"), + "region": config.StringVariable(testutils.Region), } func configVarsMinUpdated() config.Variables { temp := maps.Clone(testConfigVarsMax) - temp["name"] = config.StringVariable(testutil.ConvertConfigVariable(temp["name"]) + "changed") + temp["name"] = config.StringVariable(testutils.ConvertConfigVariable(temp["name"]) + "changed") return temp } @@ -93,685 +73,375 @@ func configVarsMaxUpdated() config.Variables { } func TestAccSQLServerFlexMinResource(t *testing.T) { - resource.Test( - t, resource.TestCase{ - ProtoV6ProviderFactories: testutil.TestAccProtoV6ProviderFactories, - CheckDestroy: testAccChecksqlserverflexDestroy, - Steps: []resource.TestStep{ - // Creation - { - Config: testutil.SQLServerFlexProviderConfig("") + "\n" + resourceMinConfig, - ConfigVariables: testConfigVarsMin, - Check: resource.ComposeAggregateTestCheckFunc( - // Instance - resource.TestCheckResourceAttr( - "stackit_sqlserverflex_instance.instance", - "project_id", - testutil.ConvertConfigVariable(testConfigVarsMin["project_id"]), - ), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "instance_id"), - resource.TestCheckResourceAttr( - "stackit_sqlserverflex_instance.instance", - "name", - testutil.ConvertConfigVariable(testConfigVarsMin["name"]), - ), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "acl.#", "1"), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "flavor.id"), - resource.TestCheckResourceAttr( - "stackit_sqlserverflex_instance.instance", - "flavor.description", - testutil.ConvertConfigVariable(testConfigVarsMin["flavor_description"]), - ), - resource.TestCheckResourceAttr( - "stackit_sqlserverflex_instance.instance", - "replicas", - testutil.ConvertConfigVariable(testConfigVarsMin["replicas"]), - ), - resource.TestCheckResourceAttr( - "stackit_sqlserverflex_instance.instance", - "flavor.cpu", - testutil.ConvertConfigVariable(testConfigVarsMin["flavor_cpu"]), - ), - resource.TestCheckResourceAttr( - "stackit_sqlserverflex_instance.instance", - "flavor.ram", - testutil.ConvertConfigVariable(testConfigVarsMin["flavor_ram"]), - ), - // User - resource.TestCheckResourceAttrPair( - "stackit_sqlserverflex_user.user", "project_id", - "stackit_sqlserverflex_instance.instance", "project_id", - ), - resource.TestCheckResourceAttrPair( - "stackit_sqlserverflex_user.user", "instance_id", - "stackit_sqlserverflex_instance.instance", "instance_id", - ), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_user.user", "user_id"), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_user.user", "password"), + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories, + CheckDestroy: testAccChecksqlserverflexDestroy, + Steps: []resource.TestStep{ + // Creation + { + Config: testutils.SQLServerFlexProviderConfig("") + "\n" + resourceMinConfig, + ConfigVariables: testConfigVarsMin, + Check: resource.ComposeAggregateTestCheckFunc( + // Instance + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "project_id", testutils.ConvertConfigVariable(testConfigVarsMin["project_id"])), + resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "instance_id"), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "name", testutils.ConvertConfigVariable(testConfigVarsMin["name"])), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "acl.#", "1"), + resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "flavor.id"), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.description", testutils.ConvertConfigVariable(testConfigVarsMin["flavor_description"])), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "replicas", testutils.ConvertConfigVariable(testConfigVarsMin["replicas"])), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.cpu", testutils.ConvertConfigVariable(testConfigVarsMin["flavor_cpu"])), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.ram", testutils.ConvertConfigVariable(testConfigVarsMin["flavor_ram"])), + // User + resource.TestCheckResourceAttrPair( + "stackit_sqlserverflex_user.user", "project_id", + "stackit_sqlserverflex_instance.instance", "project_id", ), - }, - // Update - { - Config: testutil.SQLServerFlexProviderConfig("") + "\n" + resourceMinConfig, - ConfigVariables: testConfigVarsMin, - Check: resource.ComposeAggregateTestCheckFunc( - // Instance - resource.TestCheckResourceAttr( - "stackit_sqlserverflex_instance.instance", - "project_id", - testutil.ConvertConfigVariable(testConfigVarsMin["project_id"]), - ), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "instance_id"), - resource.TestCheckResourceAttr( - "stackit_sqlserverflex_instance.instance", - "name", - testutil.ConvertConfigVariable(testConfigVarsMin["name"]), - ), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "acl.#", "1"), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "flavor.id"), - resource.TestCheckResourceAttr( - "stackit_sqlserverflex_instance.instance", - "flavor.description", - testutil.ConvertConfigVariable(testConfigVarsMin["flavor_description"]), - ), - resource.TestCheckResourceAttr( - "stackit_sqlserverflex_instance.instance", - "flavor.cpu", - testutil.ConvertConfigVariable(testConfigVarsMin["flavor_cpu"]), - ), - resource.TestCheckResourceAttr( - "stackit_sqlserverflex_instance.instance", - "flavor.ram", - testutil.ConvertConfigVariable(testConfigVarsMin["flavor_ram"]), - ), - // User - resource.TestCheckResourceAttrPair( - "stackit_sqlserverflex_user.user", "project_id", - "stackit_sqlserverflex_instance.instance", "project_id", - ), - resource.TestCheckResourceAttrPair( - "stackit_sqlserverflex_user.user", "instance_id", - "stackit_sqlserverflex_instance.instance", "instance_id", - ), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_user.user", "user_id"), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_user.user", "password"), + resource.TestCheckResourceAttrPair( + "stackit_sqlserverflex_user.user", "instance_id", + "stackit_sqlserverflex_instance.instance", "instance_id", ), - }, - // data source - { - Config: testutil.SQLServerFlexProviderConfig("") + "\n" + resourceMinConfig, - ConfigVariables: testConfigVarsMin, - Check: resource.ComposeAggregateTestCheckFunc( - // Instance data - resource.TestCheckResourceAttr( - "data.stackit_sqlserverflex_instance.instance", - "project_id", - testutil.ConvertConfigVariable(testConfigVarsMin["project_id"]), - ), - resource.TestCheckResourceAttr( - "data.stackit_sqlserverflex_instance.instance", - "name", - testutil.ConvertConfigVariable(testConfigVarsMin["name"]), - ), - resource.TestCheckResourceAttrPair( - "data.stackit_sqlserverflex_instance.instance", "project_id", - "stackit_sqlserverflex_instance.instance", "project_id", - ), - resource.TestCheckResourceAttrPair( - "data.stackit_sqlserverflex_instance.instance", "instance_id", - "stackit_sqlserverflex_instance.instance", "instance_id", - ), - resource.TestCheckResourceAttrPair( - "data.stackit_sqlserverflex_user.user", "instance_id", - "stackit_sqlserverflex_user.user", "instance_id", - ), - - resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "acl.#", "1"), - resource.TestCheckResourceAttr( - "data.stackit_sqlserverflex_instance.instance", - "flavor.id", - testutil.ConvertConfigVariable(testConfigVarsMin["flavor_id"]), - ), - resource.TestCheckResourceAttr( - "data.stackit_sqlserverflex_instance.instance", - "flavor.description", - testutil.ConvertConfigVariable(testConfigVarsMin["flavor_description"]), - ), - resource.TestCheckResourceAttr( - "data.stackit_sqlserverflex_instance.instance", - "flavor.cpu", - testutil.ConvertConfigVariable(testConfigVarsMin["flavor_cpu"]), - ), - resource.TestCheckResourceAttr( - "data.stackit_sqlserverflex_instance.instance", - "flavor.ram", - testutil.ConvertConfigVariable(testConfigVarsMin["flavor_ram"]), - ), - - // User data - resource.TestCheckResourceAttr( - "data.stackit_sqlserverflex_user.user", - "project_id", - testutil.ConvertConfigVariable(testConfigVarsMin["project_id"]), - ), - resource.TestCheckResourceAttrSet("data.stackit_sqlserverflex_user.user", "user_id"), - resource.TestCheckResourceAttr( - "data.stackit_sqlserverflex_user.user", - "username", - testutil.ConvertConfigVariable(testConfigVarsMin["username"]), - ), - resource.TestCheckResourceAttr("data.stackit_sqlserverflex_user.user", "roles.#", "1"), - resource.TestCheckResourceAttr( - "data.stackit_sqlserverflex_user.user", - "roles.0", - testutil.ConvertConfigVariable(testConfigVarsMax["role"]), - ), - ), - }, - // Import - { - ConfigVariables: testConfigVarsMin, - ResourceName: "stackit_sqlserverflex_instance.instance", - ImportStateIdFunc: func(s *terraform.State) (string, error) { - r, ok := s.RootModule().Resources["stackit_sqlserverflex_instance.instance"] - if !ok { - return "", fmt.Errorf("couldn't find resource stackit_sqlserverflex_instance.instance") - } - instanceId, ok := r.Primary.Attributes["instance_id"] - if !ok { - return "", fmt.Errorf("couldn't find attribute instance_id") - } - - return fmt.Sprintf("%s,%s,%s", testutil.ProjectId, testutil.Region, instanceId), nil - }, - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"backup_schedule"}, - ImportStateCheck: func(s []*terraform.InstanceState) error { - if len(s) != 1 { - return fmt.Errorf("expected 1 state, got %d", len(s)) - } - return nil - }, - }, - { - ResourceName: "stackit_sqlserverflex_user.user", - ConfigVariables: testConfigVarsMin, - ImportStateIdFunc: func(s *terraform.State) (string, error) { - r, ok := s.RootModule().Resources["stackit_sqlserverflex_user.user"] - if !ok { - return "", fmt.Errorf("couldn't find resource stackit_sqlserverflex_user.user") - } - instanceId, ok := r.Primary.Attributes["instance_id"] - if !ok { - return "", fmt.Errorf("couldn't find attribute instance_id") - } - userId, ok := r.Primary.Attributes["user_id"] - if !ok { - return "", fmt.Errorf("couldn't find attribute user_id") - } - - return fmt.Sprintf("%s,%s,%s,%s", testutil.ProjectId, testutil.Region, instanceId, userId), nil - }, - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"password"}, - }, - // Update - { - Config: testutil.SQLServerFlexProviderConfig("") + "\n" + resourceMinConfig, - ConfigVariables: configVarsMinUpdated(), - Check: resource.ComposeAggregateTestCheckFunc( - // Instance data - resource.TestCheckResourceAttr( - "stackit_sqlserverflex_instance.instance", - "project_id", - testutil.ConvertConfigVariable(configVarsMinUpdated()["project_id"]), - ), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "instance_id"), - resource.TestCheckResourceAttr( - "stackit_sqlserverflex_instance.instance", - "name", - testutil.ConvertConfigVariable(configVarsMinUpdated()["name"]), - ), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "acl.#", "1"), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "flavor.id"), - resource.TestCheckResourceAttrSet( - "stackit_sqlserverflex_instance.instance", - "flavor.description", - ), - resource.TestCheckResourceAttr( - "stackit_sqlserverflex_instance.instance", - "flavor.cpu", - testutil.ConvertConfigVariable(configVarsMinUpdated()["flavor_cpu"]), - ), - resource.TestCheckResourceAttr( - "stackit_sqlserverflex_instance.instance", - "flavor.ram", - testutil.ConvertConfigVariable(configVarsMinUpdated()["flavor_ram"]), - ), - ), - }, - // Deletion is done by the framework implicitly + resource.TestCheckResourceAttrSet("stackit_sqlserverflex_user.user", "user_id"), + resource.TestCheckResourceAttrSet("stackit_sqlserverflex_user.user", "password"), + ), }, + // Update + { + Config: testutils.SQLServerFlexProviderConfig("") + "\n" + resourceMinConfig, + ConfigVariables: testConfigVarsMin, + Check: resource.ComposeAggregateTestCheckFunc( + // Instance + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "project_id", testutils.ConvertConfigVariable(testConfigVarsMin["project_id"])), + resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "instance_id"), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "name", testutils.ConvertConfigVariable(testConfigVarsMin["name"])), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "acl.#", "1"), + resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "flavor.id"), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.description", testutils.ConvertConfigVariable(testConfigVarsMin["flavor_description"])), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.cpu", testutils.ConvertConfigVariable(testConfigVarsMin["flavor_cpu"])), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.ram", testutils.ConvertConfigVariable(testConfigVarsMin["flavor_ram"])), + // User + resource.TestCheckResourceAttrPair( + "stackit_sqlserverflex_user.user", "project_id", + "stackit_sqlserverflex_instance.instance", "project_id", + ), + resource.TestCheckResourceAttrPair( + "stackit_sqlserverflex_user.user", "instance_id", + "stackit_sqlserverflex_instance.instance", "instance_id", + ), + resource.TestCheckResourceAttrSet("stackit_sqlserverflex_user.user", "user_id"), + resource.TestCheckResourceAttrSet("stackit_sqlserverflex_user.user", "password"), + ), + }, + // data source + { + Config: testutils.SQLServerFlexProviderConfig("") + "\n" + resourceMinConfig, + ConfigVariables: testConfigVarsMin, + Check: resource.ComposeAggregateTestCheckFunc( + // Instance data + resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "project_id", testutils.ConvertConfigVariable(testConfigVarsMin["project_id"])), + resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "name", testutils.ConvertConfigVariable(testConfigVarsMin["name"])), + resource.TestCheckResourceAttrPair( + "data.stackit_sqlserverflex_instance.instance", "project_id", + "stackit_sqlserverflex_instance.instance", "project_id", + ), + resource.TestCheckResourceAttrPair( + "data.stackit_sqlserverflex_instance.instance", "instance_id", + "stackit_sqlserverflex_instance.instance", "instance_id", + ), + resource.TestCheckResourceAttrPair( + "data.stackit_sqlserverflex_user.user", "instance_id", + "stackit_sqlserverflex_user.user", "instance_id", + ), + + resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "acl.#", "1"), + resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "flavor.id", testutils.ConvertConfigVariable(testConfigVarsMin["flavor_id"])), + resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "flavor.description", testutils.ConvertConfigVariable(testConfigVarsMin["flavor_description"])), + resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "flavor.cpu", testutils.ConvertConfigVariable(testConfigVarsMin["flavor_cpu"])), + resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "flavor.ram", testutils.ConvertConfigVariable(testConfigVarsMin["flavor_ram"])), + + // User data + resource.TestCheckResourceAttr("data.stackit_sqlserverflex_user.user", "project_id", testutils.ConvertConfigVariable(testConfigVarsMin["project_id"])), + resource.TestCheckResourceAttrSet("data.stackit_sqlserverflex_user.user", "user_id"), + resource.TestCheckResourceAttr("data.stackit_sqlserverflex_user.user", "username", testutils.ConvertConfigVariable(testConfigVarsMin["username"])), + resource.TestCheckResourceAttr("data.stackit_sqlserverflex_user.user", "roles.#", "1"), + resource.TestCheckResourceAttr("data.stackit_sqlserverflex_user.user", "roles.0", testutils.ConvertConfigVariable(testConfigVarsMax["role"])), + ), + }, + // Import + { + ConfigVariables: testConfigVarsMin, + ResourceName: "stackit_sqlserverflex_instance.instance", + ImportStateIdFunc: func(s *terraform.State) (string, error) { + r, ok := s.RootModule().Resources["stackit_sqlserverflex_instance.instance"] + if !ok { + return "", fmt.Errorf("couldn't find resource stackit_sqlserverflex_instance.instance") + } + instanceId, ok := r.Primary.Attributes["instance_id"] + if !ok { + return "", fmt.Errorf("couldn't find attribute instance_id") + } + + return fmt.Sprintf("%s,%s,%s", testutils.ProjectId, testutils.Region, instanceId), nil + }, + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"backup_schedule"}, + ImportStateCheck: func(s []*terraform.InstanceState) error { + if len(s) != 1 { + return fmt.Errorf("expected 1 state, got %d", len(s)) + } + return nil + }, + }, + { + ResourceName: "stackit_sqlserverflex_user.user", + ConfigVariables: testConfigVarsMin, + ImportStateIdFunc: func(s *terraform.State) (string, error) { + r, ok := s.RootModule().Resources["stackit_sqlserverflex_user.user"] + if !ok { + return "", fmt.Errorf("couldn't find resource stackit_sqlserverflex_user.user") + } + instanceId, ok := r.Primary.Attributes["instance_id"] + if !ok { + return "", fmt.Errorf("couldn't find attribute instance_id") + } + userId, ok := r.Primary.Attributes["user_id"] + if !ok { + return "", fmt.Errorf("couldn't find attribute user_id") + } + + return fmt.Sprintf("%s,%s,%s,%s", testutils.ProjectId, testutils.Region, instanceId, userId), nil + }, + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"password"}, + }, + // Update + { + Config: testutils.SQLServerFlexProviderConfig("") + "\n" + resourceMinConfig, + ConfigVariables: configVarsMinUpdated(), + Check: resource.ComposeAggregateTestCheckFunc( + // Instance data + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "project_id", testutils.ConvertConfigVariable(configVarsMinUpdated()["project_id"])), + resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "instance_id"), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "name", testutils.ConvertConfigVariable(configVarsMinUpdated()["name"])), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "acl.#", "1"), + resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "flavor.id"), + resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "flavor.description"), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.cpu", testutils.ConvertConfigVariable(configVarsMinUpdated()["flavor_cpu"])), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.ram", testutils.ConvertConfigVariable(configVarsMinUpdated()["flavor_ram"])), + ), + }, + // Deletion is done by the framework implicitly }, - ) + }) } func TestAccSQLServerFlexMaxResource(t *testing.T) { - resource.Test( - t, resource.TestCase{ - ProtoV6ProviderFactories: testutil.TestAccProtoV6ProviderFactories, - CheckDestroy: testAccChecksqlserverflexDestroy, - Steps: []resource.TestStep{ - // Creation - { - Config: testutil.SQLServerFlexProviderConfig("") + "\n" + resourceMaxConfig, - ConfigVariables: testConfigVarsMax, - Check: resource.ComposeAggregateTestCheckFunc( - // Instance - resource.TestCheckResourceAttr( - "stackit_sqlserverflex_instance.instance", - "project_id", - testutil.ConvertConfigVariable(testConfigVarsMax["project_id"]), - ), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "instance_id"), - resource.TestCheckResourceAttr( - "stackit_sqlserverflex_instance.instance", - "name", - testutil.ConvertConfigVariable(testConfigVarsMax["name"]), - ), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "acl.#", "1"), - resource.TestCheckResourceAttr( - "stackit_sqlserverflex_instance.instance", - "acl.0", - testutil.ConvertConfigVariable(testConfigVarsMax["acl1"]), - ), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "flavor.id"), - resource.TestCheckResourceAttr( - "stackit_sqlserverflex_instance.instance", - "flavor.description", - testutil.ConvertConfigVariable(testConfigVarsMax["flavor_description"]), - ), - resource.TestCheckResourceAttr( - "stackit_sqlserverflex_instance.instance", - "replicas", - testutil.ConvertConfigVariable(testConfigVarsMax["replicas"]), - ), - resource.TestCheckResourceAttr( - "stackit_sqlserverflex_instance.instance", - "flavor.cpu", - testutil.ConvertConfigVariable(testConfigVarsMax["flavor_cpu"]), - ), - resource.TestCheckResourceAttr( - "stackit_sqlserverflex_instance.instance", - "flavor.ram", - testutil.ConvertConfigVariable(testConfigVarsMax["flavor_ram"]), - ), - resource.TestCheckResourceAttr( - "stackit_sqlserverflex_instance.instance", - "storage.class", - testutil.ConvertConfigVariable(testConfigVarsMax["storage_class"]), - ), - resource.TestCheckResourceAttr( - "stackit_sqlserverflex_instance.instance", - "storage.size", - testutil.ConvertConfigVariable(testConfigVarsMax["storage_size"]), - ), - resource.TestCheckResourceAttr( - "stackit_sqlserverflex_instance.instance", - "version", - testutil.ConvertConfigVariable(testConfigVarsMax["server_version"]), - ), - resource.TestCheckResourceAttr( - "stackit_sqlserverflex_instance.instance", - "options.retention_days", - testutil.ConvertConfigVariable(testConfigVarsMax["options_retention_days"]), - ), - resource.TestCheckResourceAttr( - "stackit_sqlserverflex_instance.instance", - "backup_schedule", - testutil.ConvertConfigVariable(testConfigVarsMax["backup_schedule"]), - ), - resource.TestCheckResourceAttr( - "stackit_sqlserverflex_instance.instance", - "region", - testutil.Region, - ), - // User - resource.TestCheckResourceAttrPair( - "stackit_sqlserverflex_user.user", "project_id", - "stackit_sqlserverflex_instance.instance", "project_id", - ), - resource.TestCheckResourceAttrPair( - "stackit_sqlserverflex_user.user", "instance_id", - "stackit_sqlserverflex_instance.instance", "instance_id", - ), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_user.user", "user_id"), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_user.user", "password"), + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories, + CheckDestroy: testAccChecksqlserverflexDestroy, + Steps: []resource.TestStep{ + // Creation + { + Config: testutils.SQLServerFlexProviderConfig("") + "\n" + resourceMaxConfig, + ConfigVariables: testConfigVarsMax, + Check: resource.ComposeAggregateTestCheckFunc( + // Instance + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "project_id", testutils.ConvertConfigVariable(testConfigVarsMax["project_id"])), + resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "instance_id"), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "name", testutils.ConvertConfigVariable(testConfigVarsMax["name"])), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "acl.#", "1"), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "acl.0", testutils.ConvertConfigVariable(testConfigVarsMax["acl1"])), + resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "flavor.id"), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.description", testutils.ConvertConfigVariable(testConfigVarsMax["flavor_description"])), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "replicas", testutils.ConvertConfigVariable(testConfigVarsMax["replicas"])), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.cpu", testutils.ConvertConfigVariable(testConfigVarsMax["flavor_cpu"])), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.ram", testutils.ConvertConfigVariable(testConfigVarsMax["flavor_ram"])), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "storage.class", testutils.ConvertConfigVariable(testConfigVarsMax["storage_class"])), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "storage.size", testutils.ConvertConfigVariable(testConfigVarsMax["storage_size"])), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "version", testutils.ConvertConfigVariable(testConfigVarsMax["server_version"])), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "options.retention_days", testutils.ConvertConfigVariable(testConfigVarsMax["options_retention_days"])), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "backup_schedule", testutils.ConvertConfigVariable(testConfigVarsMax["backup_schedule"])), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "region", testutils.Region), + // User + resource.TestCheckResourceAttrPair( + "stackit_sqlserverflex_user.user", "project_id", + "stackit_sqlserverflex_instance.instance", "project_id", ), - }, - // Update - { - Config: testutil.SQLServerFlexProviderConfig("") + "\n" + resourceMaxConfig, - ConfigVariables: testConfigVarsMax, - Check: resource.ComposeAggregateTestCheckFunc( - // Instance - resource.TestCheckResourceAttr( - "stackit_sqlserverflex_instance.instance", - "project_id", - testutil.ConvertConfigVariable(testConfigVarsMax["project_id"]), - ), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "instance_id"), - resource.TestCheckResourceAttr( - "stackit_sqlserverflex_instance.instance", - "name", - testutil.ConvertConfigVariable(testConfigVarsMax["name"]), - ), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "acl.#", "1"), - resource.TestCheckResourceAttr( - "stackit_sqlserverflex_instance.instance", - "acl.0", - testutil.ConvertConfigVariable(testConfigVarsMax["acl1"]), - ), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "flavor.id"), - resource.TestCheckResourceAttr( - "stackit_sqlserverflex_instance.instance", - "flavor.description", - testutil.ConvertConfigVariable(testConfigVarsMax["flavor_description"]), - ), - resource.TestCheckResourceAttr( - "stackit_sqlserverflex_instance.instance", - "replicas", - testutil.ConvertConfigVariable(testConfigVarsMax["replicas"]), - ), - resource.TestCheckResourceAttr( - "stackit_sqlserverflex_instance.instance", - "flavor.cpu", - testutil.ConvertConfigVariable(testConfigVarsMax["flavor_cpu"]), - ), - resource.TestCheckResourceAttr( - "stackit_sqlserverflex_instance.instance", - "flavor.ram", - testutil.ConvertConfigVariable(testConfigVarsMax["flavor_ram"]), - ), - resource.TestCheckResourceAttr( - "stackit_sqlserverflex_instance.instance", - "storage.class", - testutil.ConvertConfigVariable(testConfigVarsMax["storage_class"]), - ), - resource.TestCheckResourceAttr( - "stackit_sqlserverflex_instance.instance", - "storage.size", - testutil.ConvertConfigVariable(testConfigVarsMax["storage_size"]), - ), - resource.TestCheckResourceAttr( - "stackit_sqlserverflex_instance.instance", - "version", - testutil.ConvertConfigVariable(testConfigVarsMax["server_version"]), - ), - resource.TestCheckResourceAttr( - "stackit_sqlserverflex_instance.instance", - "options.retention_days", - testutil.ConvertConfigVariable(testConfigVarsMax["options_retention_days"]), - ), - resource.TestCheckResourceAttr( - "stackit_sqlserverflex_instance.instance", - "backup_schedule", - testutil.ConvertConfigVariable(testConfigVarsMax["backup_schedule"]), - ), - resource.TestCheckResourceAttr( - "stackit_sqlserverflex_instance.instance", - "region", - testutil.Region, - ), - // User - resource.TestCheckResourceAttrPair( - "stackit_sqlserverflex_user.user", "project_id", - "stackit_sqlserverflex_instance.instance", "project_id", - ), - resource.TestCheckResourceAttrPair( - "stackit_sqlserverflex_user.user", "instance_id", - "stackit_sqlserverflex_instance.instance", "instance_id", - ), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_user.user", "user_id"), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_user.user", "password"), + resource.TestCheckResourceAttrPair( + "stackit_sqlserverflex_user.user", "instance_id", + "stackit_sqlserverflex_instance.instance", "instance_id", ), - }, - // data source - { - Config: testutil.SQLServerFlexProviderConfig("") + "\n" + resourceMaxConfig, - ConfigVariables: testConfigVarsMax, - Check: resource.ComposeAggregateTestCheckFunc( - // Instance data - resource.TestCheckResourceAttr( - "data.stackit_sqlserverflex_instance.instance", - "project_id", - testutil.ConvertConfigVariable(testConfigVarsMax["project_id"]), - ), - resource.TestCheckResourceAttr( - "data.stackit_sqlserverflex_instance.instance", - "name", - testutil.ConvertConfigVariable(testConfigVarsMax["name"]), - ), - resource.TestCheckResourceAttrPair( - "data.stackit_sqlserverflex_instance.instance", "project_id", - "stackit_sqlserverflex_instance.instance", "project_id", - ), - resource.TestCheckResourceAttrPair( - "data.stackit_sqlserverflex_instance.instance", "instance_id", - "stackit_sqlserverflex_instance.instance", "instance_id", - ), - resource.TestCheckResourceAttrPair( - "data.stackit_sqlserverflex_user.user", "instance_id", - "stackit_sqlserverflex_user.user", "instance_id", - ), - - resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "acl.#", "1"), - resource.TestCheckResourceAttr( - "data.stackit_sqlserverflex_instance.instance", - "acl.0", - testutil.ConvertConfigVariable(testConfigVarsMax["acl1"]), - ), - resource.TestCheckResourceAttr( - "data.stackit_sqlserverflex_instance.instance", - "flavor.id", - testutil.ConvertConfigVariable(testConfigVarsMax["flavor_id"]), - ), - resource.TestCheckResourceAttr( - "data.stackit_sqlserverflex_instance.instance", - "flavor.description", - testutil.ConvertConfigVariable(testConfigVarsMax["flavor_description"]), - ), - resource.TestCheckResourceAttr( - "data.stackit_sqlserverflex_instance.instance", - "flavor.cpu", - testutil.ConvertConfigVariable(testConfigVarsMax["flavor_cpu"]), - ), - resource.TestCheckResourceAttr( - "data.stackit_sqlserverflex_instance.instance", - "flavor.ram", - testutil.ConvertConfigVariable(testConfigVarsMax["flavor_ram"]), - ), - resource.TestCheckResourceAttr( - "data.stackit_sqlserverflex_instance.instance", - "replicas", - testutil.ConvertConfigVariable(testConfigVarsMax["replicas"]), - ), - resource.TestCheckResourceAttr( - "stackit_sqlserverflex_instance.instance", - "options.retention_days", - testutil.ConvertConfigVariable(testConfigVarsMax["options_retention_days"]), - ), - resource.TestCheckResourceAttr( - "data.stackit_sqlserverflex_instance.instance", - "backup_schedule", - testutil.ConvertConfigVariable(testConfigVarsMax["backup_schedule"]), - ), - - // User data - resource.TestCheckResourceAttr( - "data.stackit_sqlserverflex_user.user", - "project_id", - testutil.ConvertConfigVariable(testConfigVarsMax["project_id"]), - ), - resource.TestCheckResourceAttrSet("data.stackit_sqlserverflex_user.user", "user_id"), - resource.TestCheckResourceAttr( - "data.stackit_sqlserverflex_user.user", - "username", - testutil.ConvertConfigVariable(testConfigVarsMax["username"]), - ), - resource.TestCheckResourceAttr("data.stackit_sqlserverflex_user.user", "roles.#", "1"), - resource.TestCheckResourceAttr( - "data.stackit_sqlserverflex_user.user", - "roles.0", - testutil.ConvertConfigVariable(testConfigVarsMax["role"]), - ), - resource.TestCheckResourceAttrSet("data.stackit_sqlserverflex_user.user", "host"), - resource.TestCheckResourceAttrSet("data.stackit_sqlserverflex_user.user", "port"), - ), - }, - // Import - { - ConfigVariables: testConfigVarsMax, - ResourceName: "stackit_sqlserverflex_instance.instance", - ImportStateIdFunc: func(s *terraform.State) (string, error) { - r, ok := s.RootModule().Resources["stackit_sqlserverflex_instance.instance"] - if !ok { - return "", fmt.Errorf("couldn't find resource stackit_sqlserverflex_instance.instance") - } - instanceId, ok := r.Primary.Attributes["instance_id"] - if !ok { - return "", fmt.Errorf("couldn't find attribute instance_id") - } - - return fmt.Sprintf("%s,%s,%s", testutil.ProjectId, testutil.Region, instanceId), nil - }, - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"backup_schedule"}, - ImportStateCheck: func(s []*terraform.InstanceState) error { - if len(s) != 1 { - return fmt.Errorf("expected 1 state, got %d", len(s)) - } - if s[0].Attributes["backup_schedule"] != testutil.ConvertConfigVariable(testConfigVarsMax["backup_schedule"]) { - return fmt.Errorf( - "expected backup_schedule %s, got %s", - testConfigVarsMax["backup_schedule"], - s[0].Attributes["backup_schedule"], - ) - } - return nil - }, - }, - { - ResourceName: "stackit_sqlserverflex_user.user", - ConfigVariables: testConfigVarsMax, - ImportStateIdFunc: func(s *terraform.State) (string, error) { - r, ok := s.RootModule().Resources["stackit_sqlserverflex_user.user"] - if !ok { - return "", fmt.Errorf("couldn't find resource stackit_sqlserverflex_user.user") - } - instanceId, ok := r.Primary.Attributes["instance_id"] - if !ok { - return "", fmt.Errorf("couldn't find attribute instance_id") - } - userId, ok := r.Primary.Attributes["user_id"] - if !ok { - return "", fmt.Errorf("couldn't find attribute user_id") - } - - return fmt.Sprintf("%s,%s,%s,%s", testutil.ProjectId, testutil.Region, instanceId, userId), nil - }, - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"password"}, - }, - // Update - { - Config: testutil.SQLServerFlexProviderConfig("") + "\n" + resourceMaxConfig, - ConfigVariables: configVarsMaxUpdated(), - Check: resource.ComposeAggregateTestCheckFunc( - // Instance data - resource.TestCheckResourceAttr( - "stackit_sqlserverflex_instance.instance", - "project_id", - testutil.ConvertConfigVariable(configVarsMaxUpdated()["project_id"]), - ), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "instance_id"), - resource.TestCheckResourceAttr( - "stackit_sqlserverflex_instance.instance", - "name", - testutil.ConvertConfigVariable(configVarsMaxUpdated()["name"]), - ), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "acl.#", "1"), - resource.TestCheckResourceAttr( - "stackit_sqlserverflex_instance.instance", - "acl.0", - testutil.ConvertConfigVariable(configVarsMaxUpdated()["acl1"]), - ), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "flavor.id"), - resource.TestCheckResourceAttrSet( - "stackit_sqlserverflex_instance.instance", - "flavor.description", - ), - resource.TestCheckResourceAttr( - "stackit_sqlserverflex_instance.instance", - "flavor.cpu", - testutil.ConvertConfigVariable(configVarsMaxUpdated()["flavor_cpu"]), - ), - resource.TestCheckResourceAttr( - "stackit_sqlserverflex_instance.instance", - "flavor.ram", - testutil.ConvertConfigVariable(configVarsMaxUpdated()["flavor_ram"]), - ), - resource.TestCheckResourceAttr( - "stackit_sqlserverflex_instance.instance", - "replicas", - testutil.ConvertConfigVariable(configVarsMaxUpdated()["replicas"]), - ), - resource.TestCheckResourceAttr( - "stackit_sqlserverflex_instance.instance", - "storage.class", - testutil.ConvertConfigVariable(configVarsMaxUpdated()["storage_class"]), - ), - resource.TestCheckResourceAttr( - "stackit_sqlserverflex_instance.instance", - "storage.size", - testutil.ConvertConfigVariable(configVarsMaxUpdated()["storage_size"]), - ), - resource.TestCheckResourceAttr( - "stackit_sqlserverflex_instance.instance", - "version", - testutil.ConvertConfigVariable(configVarsMaxUpdated()["server_version"]), - ), - resource.TestCheckResourceAttr( - "stackit_sqlserverflex_instance.instance", - "options.retention_days", - testutil.ConvertConfigVariable(configVarsMaxUpdated()["options_retention_days"]), - ), - resource.TestCheckResourceAttr( - "stackit_sqlserverflex_instance.instance", - "backup_schedule", - testutil.ConvertConfigVariable(configVarsMaxUpdated()["backup_schedule"]), - ), - ), - }, - // Deletion is done by the framework implicitly + resource.TestCheckResourceAttrSet("stackit_sqlserverflex_user.user", "user_id"), + resource.TestCheckResourceAttrSet("stackit_sqlserverflex_user.user", "password"), + ), }, + // Update + { + Config: testutils.SQLServerFlexProviderConfig("") + "\n" + resourceMaxConfig, + ConfigVariables: testConfigVarsMax, + Check: resource.ComposeAggregateTestCheckFunc( + // Instance + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "project_id", testutils.ConvertConfigVariable(testConfigVarsMax["project_id"])), + resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "instance_id"), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "name", testutils.ConvertConfigVariable(testConfigVarsMax["name"])), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "acl.#", "1"), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "acl.0", testutils.ConvertConfigVariable(testConfigVarsMax["acl1"])), + resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "flavor.id"), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.description", testutils.ConvertConfigVariable(testConfigVarsMax["flavor_description"])), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "replicas", testutils.ConvertConfigVariable(testConfigVarsMax["replicas"])), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.cpu", testutils.ConvertConfigVariable(testConfigVarsMax["flavor_cpu"])), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.ram", testutils.ConvertConfigVariable(testConfigVarsMax["flavor_ram"])), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "storage.class", testutils.ConvertConfigVariable(testConfigVarsMax["storage_class"])), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "storage.size", testutils.ConvertConfigVariable(testConfigVarsMax["storage_size"])), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "version", testutils.ConvertConfigVariable(testConfigVarsMax["server_version"])), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "options.retention_days", testutils.ConvertConfigVariable(testConfigVarsMax["options_retention_days"])), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "backup_schedule", testutils.ConvertConfigVariable(testConfigVarsMax["backup_schedule"])), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "region", testutils.Region), + // User + resource.TestCheckResourceAttrPair( + "stackit_sqlserverflex_user.user", "project_id", + "stackit_sqlserverflex_instance.instance", "project_id", + ), + resource.TestCheckResourceAttrPair( + "stackit_sqlserverflex_user.user", "instance_id", + "stackit_sqlserverflex_instance.instance", "instance_id", + ), + resource.TestCheckResourceAttrSet("stackit_sqlserverflex_user.user", "user_id"), + resource.TestCheckResourceAttrSet("stackit_sqlserverflex_user.user", "password"), + ), + }, + // data source + { + Config: testutils.SQLServerFlexProviderConfig("") + "\n" + resourceMaxConfig, + ConfigVariables: testConfigVarsMax, + Check: resource.ComposeAggregateTestCheckFunc( + // Instance data + resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "project_id", testutils.ConvertConfigVariable(testConfigVarsMax["project_id"])), + resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "name", testutils.ConvertConfigVariable(testConfigVarsMax["name"])), + resource.TestCheckResourceAttrPair( + "data.stackit_sqlserverflex_instance.instance", "project_id", + "stackit_sqlserverflex_instance.instance", "project_id", + ), + resource.TestCheckResourceAttrPair( + "data.stackit_sqlserverflex_instance.instance", "instance_id", + "stackit_sqlserverflex_instance.instance", "instance_id", + ), + resource.TestCheckResourceAttrPair( + "data.stackit_sqlserverflex_user.user", "instance_id", + "stackit_sqlserverflex_user.user", "instance_id", + ), + + resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "acl.#", "1"), + resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "acl.0", testutils.ConvertConfigVariable(testConfigVarsMax["acl1"])), + resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "flavor.id", testutils.ConvertConfigVariable(testConfigVarsMax["flavor_id"])), + resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "flavor.description", testutils.ConvertConfigVariable(testConfigVarsMax["flavor_description"])), + resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "flavor.cpu", testutils.ConvertConfigVariable(testConfigVarsMax["flavor_cpu"])), + resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "flavor.ram", testutils.ConvertConfigVariable(testConfigVarsMax["flavor_ram"])), + resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "replicas", testutils.ConvertConfigVariable(testConfigVarsMax["replicas"])), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "options.retention_days", testutils.ConvertConfigVariable(testConfigVarsMax["options_retention_days"])), + resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "backup_schedule", testutils.ConvertConfigVariable(testConfigVarsMax["backup_schedule"])), + + // User data + resource.TestCheckResourceAttr("data.stackit_sqlserverflex_user.user", "project_id", testutils.ConvertConfigVariable(testConfigVarsMax["project_id"])), + resource.TestCheckResourceAttrSet("data.stackit_sqlserverflex_user.user", "user_id"), + resource.TestCheckResourceAttr("data.stackit_sqlserverflex_user.user", "username", testutils.ConvertConfigVariable(testConfigVarsMax["username"])), + resource.TestCheckResourceAttr("data.stackit_sqlserverflex_user.user", "roles.#", "1"), + resource.TestCheckResourceAttr("data.stackit_sqlserverflex_user.user", "roles.0", testutils.ConvertConfigVariable(testConfigVarsMax["role"])), + resource.TestCheckResourceAttrSet("data.stackit_sqlserverflex_user.user", "host"), + resource.TestCheckResourceAttrSet("data.stackit_sqlserverflex_user.user", "port"), + ), + }, + // Import + { + ConfigVariables: testConfigVarsMax, + ResourceName: "stackit_sqlserverflex_instance.instance", + ImportStateIdFunc: func(s *terraform.State) (string, error) { + r, ok := s.RootModule().Resources["stackit_sqlserverflex_instance.instance"] + if !ok { + return "", fmt.Errorf("couldn't find resource stackit_sqlserverflex_instance.instance") + } + instanceId, ok := r.Primary.Attributes["instance_id"] + if !ok { + return "", fmt.Errorf("couldn't find attribute instance_id") + } + + return fmt.Sprintf("%s,%s,%s", testutils.ProjectId, testutils.Region, instanceId), nil + }, + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"backup_schedule"}, + ImportStateCheck: func(s []*terraform.InstanceState) error { + if len(s) != 1 { + return fmt.Errorf("expected 1 state, got %d", len(s)) + } + if s[0].Attributes["backup_schedule"] != testutils.ConvertConfigVariable(testConfigVarsMax["backup_schedule"]) { + return fmt.Errorf("expected backup_schedule %s, got %s", testConfigVarsMax["backup_schedule"], s[0].Attributes["backup_schedule"]) + } + return nil + }, + }, + { + ResourceName: "stackit_sqlserverflex_user.user", + ConfigVariables: testConfigVarsMax, + ImportStateIdFunc: func(s *terraform.State) (string, error) { + r, ok := s.RootModule().Resources["stackit_sqlserverflex_user.user"] + if !ok { + return "", fmt.Errorf("couldn't find resource stackit_sqlserverflex_user.user") + } + instanceId, ok := r.Primary.Attributes["instance_id"] + if !ok { + return "", fmt.Errorf("couldn't find attribute instance_id") + } + userId, ok := r.Primary.Attributes["user_id"] + if !ok { + return "", fmt.Errorf("couldn't find attribute user_id") + } + + return fmt.Sprintf("%s,%s,%s,%s", testutils.ProjectId, testutils.Region, instanceId, userId), nil + }, + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"password"}, + }, + // Update + { + Config: testutils.SQLServerFlexProviderConfig("") + "\n" + resourceMaxConfig, + ConfigVariables: configVarsMaxUpdated(), + Check: resource.ComposeAggregateTestCheckFunc( + // Instance data + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "project_id", testutils.ConvertConfigVariable(configVarsMaxUpdated()["project_id"])), + resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "instance_id"), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "name", testutils.ConvertConfigVariable(configVarsMaxUpdated()["name"])), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "acl.#", "1"), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "acl.0", testutils.ConvertConfigVariable(configVarsMaxUpdated()["acl1"])), + resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "flavor.id"), + resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "flavor.description"), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.cpu", testutils.ConvertConfigVariable(configVarsMaxUpdated()["flavor_cpu"])), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.ram", testutils.ConvertConfigVariable(configVarsMaxUpdated()["flavor_ram"])), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "replicas", testutils.ConvertConfigVariable(configVarsMaxUpdated()["replicas"])), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "storage.class", testutils.ConvertConfigVariable(configVarsMaxUpdated()["storage_class"])), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "storage.size", testutils.ConvertConfigVariable(configVarsMaxUpdated()["storage_size"])), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "version", testutils.ConvertConfigVariable(configVarsMaxUpdated()["server_version"])), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "options.retention_days", testutils.ConvertConfigVariable(configVarsMaxUpdated()["options_retention_days"])), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "backup_schedule", testutils.ConvertConfigVariable(configVarsMaxUpdated()["backup_schedule"])), + ), + }, + // Deletion is done by the framework implicitly }, - ) + }) } func testAccChecksqlserverflexDestroy(s *terraform.State) error { ctx := context.Background() var client *sqlserverflex.APIClient var err error - if testutil.SQLServerFlexCustomEndpoint == "" { + if testutils.SQLServerFlexCustomEndpoint == "" { client, err = sqlserverflex.NewAPIClient() } else { client, err = sqlserverflex.NewAPIClient( - coreconfig.WithEndpoint(testutil.SQLServerFlexCustomEndpoint), + coreconfig.WithEndpoint(testutils.SQLServerFlexCustomEndpoint), ) } if err != nil { @@ -788,7 +458,7 @@ func testAccChecksqlserverflexDestroy(s *terraform.State) error { instancesToDestroy = append(instancesToDestroy, instanceId) } - instancesResp, err := client.ListInstances(ctx, testutil.ProjectId, testutil.Region).Execute() + instancesResp, err := client.ListInstances(ctx, testutils.ProjectId, testutils.Region).Execute() if err != nil { return fmt.Errorf("getting instancesResp: %w", err) } @@ -799,23 +469,13 @@ func testAccChecksqlserverflexDestroy(s *terraform.State) error { continue } if utils.Contains(instancesToDestroy, *items[i].Id) { - err := client.DeleteInstanceExecute(ctx, testutil.ProjectId, *items[i].Id, testutil.Region) + err := client.DeleteInstanceExecute(ctx, testutils.ProjectId, *items[i].Id, testutils.Region) if err != nil { return fmt.Errorf("destroying instance %s during CheckDestroy: %w", *items[i].Id, err) } - _, err = wait.DeleteInstanceWaitHandler( - ctx, - client, - testutil.ProjectId, - *items[i].Id, - testutil.Region, - ).WaitWithContext(ctx) + _, err = wait.DeleteInstanceWaitHandler(ctx, client, testutils.ProjectId, *items[i].Id, testutils.Region).WaitWithContext(ctx) if err != nil { - return fmt.Errorf( - "destroying instance %s during CheckDestroy: waiting for deletion %w", - *items[i].Id, - err, - ) + return fmt.Errorf("destroying instance %s during CheckDestroy: waiting for deletion %w", *items[i].Id, err) } } } From ed7ff0f58e11380af8013c43d7e8b6c974b073ba Mon Sep 17 00:00:00 2001 From: "Marcel S. Henselin" Date: Wed, 11 Feb 2026 14:20:41 +0000 Subject: [PATCH 16/41] chore: add tests (#48) ## Description relates to #1234 ## Checklist - [ ] Issue was linked above - [ ] Code format was applied: `make fmt` - [ ] Examples were added / adjusted (see `examples/` directory) - [x] Docs are up-to-date: `make generate-docs` (will be checked by CI) - [ ] Unit tests got implemented or updated - [ ] Acceptance tests got implemented or updated (see e.g. [here](https://github.com/stackitcloud/terraform-provider-stackit/blob/f5f99d170996b208672ae684b6da53420e369563/stackit/internal/services/dns/dns_acc_test.go)) - [x] Unit tests are passing: `make test` (will be checked by CI) - [x] No linter issues: `make lint` (will be checked by CI) Reviewed-on: https://tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pulls/48 Co-authored-by: Marcel S. Henselin Co-committed-by: Marcel S. Henselin --- .../resource.tf | 2 +- .../postgresflex_acc_test.go | 117 +++++++- .../testdata/instance_template.gompl | 24 ++ .../sqlserverflex_acc_test.go | 272 ++++++++++++++++++ .../testdata/instance_template.gompl | 53 ++++ stackit/provider.go | 3 + 6 files changed, 469 insertions(+), 2 deletions(-) create mode 100644 stackit/internal/services/sqlserverflexbeta/sqlserverflex_acc_test.go create mode 100644 stackit/internal/services/sqlserverflexbeta/testdata/instance_template.gompl diff --git a/examples/resources/stackitprivatepreview_postgresflexalpha_user/resource.tf b/examples/resources/stackitprivatepreview_postgresflexalpha_user/resource.tf index 9ec5c419..01469992 100644 --- a/examples/resources/stackitprivatepreview_postgresflexalpha_user/resource.tf +++ b/examples/resources/stackitprivatepreview_postgresflexalpha_user/resource.tf @@ -1,7 +1,7 @@ resource "stackitprivatepreview_postgresflexalpha_user" "example" { project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" - username = "username" + name = "username" roles = ["role"] } diff --git a/stackit/internal/services/postgresflexalpha/postgresflex_acc_test.go b/stackit/internal/services/postgresflexalpha/postgresflex_acc_test.go index dba1a086..29facfee 100644 --- a/stackit/internal/services/postgresflexalpha/postgresflex_acc_test.go +++ b/stackit/internal/services/postgresflexalpha/postgresflex_acc_test.go @@ -110,6 +110,20 @@ type resData struct { AccessScope string RetentionDays uint32 Version string + Users []User + Databases []Database +} + +type User struct { + Name string + ProjectId string + Roles []string +} + +type Database struct { + Name string + ProjectId string + Owner string } func getExample() resData { @@ -133,7 +147,7 @@ func getExample() resData { } } -func TestAccResourceExample_basic(t *testing.T) { +func TestAccInstance(t *testing.T) { exData := getExample() resName := fmt.Sprintf( "stackitprivatepreview_postgresflexalpha_instance.%s", @@ -195,6 +209,107 @@ func TestAccResourceExample_basic(t *testing.T) { }) } +func TestAccInstanceWithUsers(t *testing.T) { + data := getExample() + userName := "testUser" + data.Users = []User{ + { + Name: userName, + ProjectId: os.Getenv("TF_ACC_PROJECT_ID"), + Roles: []string{"login"}, + }, + } + + resName := fmt.Sprintf( + "stackitprivatepreview_postgresflexalpha_instance.%s", + data.TfName, + ) + + resUserName := fmt.Sprintf( + "stackitprivatepreview_postgresflexalpha_user.%s", + userName, + ) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories, + Steps: []resource.TestStep{ + // Create and verify + { + Config: testutils.StringFromTemplateMust( + "testdata/instance_template.gompl", + data, + ), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr(resName, "name", data.Name), + resource.TestCheckResourceAttrSet(resName, "id"), + resource.TestCheckResourceAttr(resUserName, "name", userName), + resource.TestCheckResourceAttrSet(resUserName, "id"), + ), + }, + }, + }) +} + +func TestAccInstanceWithDatabases(t *testing.T) { + data := getExample() + dbName := "testDb" + userName := "testUser" + data.Users = []User{ + { + Name: userName, + ProjectId: os.Getenv("TF_ACC_PROJECT_ID"), + Roles: []string{"login"}, + }, + } + + data.Databases = []Database{ + { + Name: dbName, + ProjectId: os.Getenv("TF_ACC_PROJECT_ID"), + Owner: userName, + }, + } + + resName := fmt.Sprintf( + "stackitprivatepreview_postgresflexalpha_instance.%s", + data.TfName, + ) + + resUserName := fmt.Sprintf( + "stackitprivatepreview_postgresflexalpha_user.%s", + userName, + ) + + resDbName := fmt.Sprintf( + "stackitprivatepreview_postgresflexalpha_database.%s", + dbName, + ) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories, + Steps: []resource.TestStep{ + // Create and verify + { + Config: testutils.StringFromTemplateMust( + "testdata/instance_template.gompl", + data, + ), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr(resName, "name", data.Name), + resource.TestCheckResourceAttrSet(resName, "id"), + resource.TestCheckResourceAttr(resUserName, "name", userName), + resource.TestCheckResourceAttrSet(resUserName, "id"), + resource.TestCheckResourceAttr(resDbName, "name", dbName), + resource.TestCheckResourceAttr(resDbName, "owner", userName), + resource.TestCheckResourceAttrSet(resDbName, "id"), + ), + }, + }, + }) +} + //func setupMockServer() *httptest.Server { // mux := http.NewServeMux() // diff --git a/stackit/internal/services/postgresflexalpha/testdata/instance_template.gompl b/stackit/internal/services/postgresflexalpha/testdata/instance_template.gompl index b523868c..83444f7c 100644 --- a/stackit/internal/services/postgresflexalpha/testdata/instance_template.gompl +++ b/stackit/internal/services/postgresflexalpha/testdata/instance_template.gompl @@ -28,3 +28,27 @@ resource "stackitprivatepreview_postgresflexalpha_instance" "{{ .TfName }}" { } version = {{ .Version }} } + +{{ if .Users }} +{{ $tfName := .TfName }} +{{ range $user := .Users }} +resource "stackitprivatepreview_postgresflexalpha_user" "{{ $user.Name }}" { + project_id = "{{ $user.ProjectId }}" + instance_id = stackitprivatepreview_postgresflexalpha_instance.{{ $tfName }}.instance_id + name = "{{ $user.Name }}" + roles = [{{ range $i, $v := $user.Roles }}{{if $i}},{{end}}"{{$v}}"{{end}}] +} +{{ end }} +{{ end }} + +{{ if .Databases }} +{{ $tfName := .TfName }} +{{ range $db := .Databases }} +resource "stackitprivatepreview_postgresflexalpha_database" "{{ $db.Name }}" { + project_id = "{{ $db.ProjectId }}" + instance_id = stackitprivatepreview_postgresflexalpha_instance.{{ $tfName }}.instance_id + name = "{{ $db.Name }}" + owner = "{{ $db.Owner }}" +} +{{ end }} +{{ end }} diff --git a/stackit/internal/services/sqlserverflexbeta/sqlserverflex_acc_test.go b/stackit/internal/services/sqlserverflexbeta/sqlserverflex_acc_test.go new file mode 100644 index 00000000..9a3bb1b7 --- /dev/null +++ b/stackit/internal/services/sqlserverflexbeta/sqlserverflex_acc_test.go @@ -0,0 +1,272 @@ +package sqlserverflexbeta_test + +import ( + "context" + _ "embed" + "fmt" + "os" + "strconv" + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/acctest" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + sqlserverflexbeta "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/instance" + + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/internal/testutils" + // The fwresource import alias is so there is no collision + // with the more typical acceptance testing import: + // "github.com/hashicorp/terraform-plugin-testing/helper/resource" + fwresource "github.com/hashicorp/terraform-plugin-framework/resource" +) + +func TestInstanceResourceSchema(t *testing.T) { + t.Parallel() + + ctx := context.Background() + schemaRequest := fwresource.SchemaRequest{} + schemaResponse := &fwresource.SchemaResponse{} + + // Instantiate the resource.Resource and call its Schema method + sqlserverflexbeta.NewInstanceResource().Schema(ctx, schemaRequest, schemaResponse) + + if schemaResponse.Diagnostics.HasError() { + t.Fatalf("Schema method diagnostics: %+v", schemaResponse.Diagnostics) + } + + // Validate the schema + diagnostics := schemaResponse.Schema.ValidateImplementation(ctx) + + if diagnostics.HasError() { + t.Fatalf("Schema validation diagnostics: %+v", diagnostics) + } +} + +func TestMain(m *testing.M) { + testutils.Setup() + code := m.Run() + // shutdown() + os.Exit(code) +} + +func testAccPreCheck(t *testing.T) { + if _, ok := os.LookupEnv("TF_ACC_PROJECT_ID"); !ok { + t.Fatalf("could not find env var TF_ACC_PROJECT_ID") + } +} + +type resData struct { + ServiceAccountFilePath string + ProjectId string + Region string + Name string + TfName string + FlavorId string + BackupSchedule string + UseEncryption bool + KekKeyId string + KekKeyRingId string + KekKeyVersion uint8 + KekServiceAccount string + PerformanceClass string + Size uint32 + AclString string + AccessScope string + RetentionDays uint32 + Version string + Users []User + Databases []Database +} + +type User struct { + Name string + ProjectId string + Roles []string +} + +type Database struct { + Name string + ProjectId string + Owner string +} + +func getExample() resData { + name := acctest.RandomWithPrefix("tf-acc") + return resData{ + Region: os.Getenv("TF_ACC_REGION"), + ServiceAccountFilePath: os.Getenv("TF_ACC_SERVICE_ACCOUNT_FILE"), + ProjectId: os.Getenv("TF_ACC_PROJECT_ID"), + Name: name, + TfName: name, + FlavorId: "4.16-Single", + BackupSchedule: "0 0 * * *", + UseEncryption: false, + RetentionDays: 33, + PerformanceClass: "premium-perf2-stackit", + Size: 10, + AclString: "0.0.0.0/0", + AccessScope: "PUBLIC", + Version: "2022", + } +} + +func TestAccInstance(t *testing.T) { + exData := getExample() + resName := fmt.Sprintf( + "stackitprivatepreview_sqlserverflexbeta_instance.%s", + exData.TfName, + ) + + updNameData := exData + updNameData.Name = "name-updated" + + updSizeData := exData + updSizeData.Size = 25 + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories, + Steps: []resource.TestStep{ + // Create and verify + { + Config: testutils.StringFromTemplateMust( + "testdata/instance_template.gompl", + exData, + ), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr(resName, "name", exData.Name), + resource.TestCheckResourceAttrSet(resName, "id"), + ), + }, + // Update name and verify + { + Config: testutils.StringFromTemplateMust( + "testdata/instance_template.gompl", + updNameData, + ), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr(resName, "name", updNameData.Name), + ), + }, + // Update size and verify + { + Config: testutils.StringFromTemplateMust( + "testdata/instance_template.gompl", + updSizeData, + ), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + resName, + "storage.size", + strconv.Itoa(int(updSizeData.Size)), + ), + ), + }, + //// Import test + //{ + // ResourceName: "example_resource.test", + // ImportState: true, + // ImportStateVerify: true, + //}, + }, + }) +} + +func TestAccInstanceWithUsers(t *testing.T) { + data := getExample() + userName := "testUser" + data.Users = []User{ + { + Name: userName, + ProjectId: os.Getenv("TF_ACC_PROJECT_ID"), + Roles: []string{"##STACKIT_LoginManager##", "##STACKIT_DatabaseManager##"}, + }, + } + + resName := fmt.Sprintf( + "stackitprivatepreview_sqlserverflexbeta_instance.%s", + data.TfName, + ) + + resUserName := fmt.Sprintf( + "stackitprivatepreview_sqlserverflexbeta_user.%s", + userName, + ) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories, + Steps: []resource.TestStep{ + // Create and verify + { + Config: testutils.StringFromTemplateMust( + "testdata/instance_template.gompl", + data, + ), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr(resName, "name", data.Name), + resource.TestCheckResourceAttrSet(resName, "id"), + resource.TestCheckResourceAttr(resUserName, "name", userName), + resource.TestCheckResourceAttrSet(resUserName, "id"), + ), + }, + }, + }) +} + +func TestAccInstanceWithDatabases(t *testing.T) { + data := getExample() + dbName := "testDb" + userName := "testUser" + data.Users = []User{ + { + Name: userName, + ProjectId: os.Getenv("TF_ACC_PROJECT_ID"), + Roles: []string{"##STACKIT_LoginManager##", "##STACKIT_DatabaseManager##"}, + }, + } + data.Databases = []Database{ + { + Name: dbName, + ProjectId: os.Getenv("TF_ACC_PROJECT_ID"), + Owner: userName, + }, + } + + resName := fmt.Sprintf( + "stackitprivatepreview_sqlserverflexbeta_instance.%s", + data.TfName, + ) + + resUserName := fmt.Sprintf( + "stackitprivatepreview_sqlserverflexbeta_user.%s", + userName, + ) + + resDbName := fmt.Sprintf( + "stackitprivatepreview_sqlserverflexbeta_database.%s", + dbName, + ) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories, + Steps: []resource.TestStep{ + // Create and verify + { + Config: testutils.StringFromTemplateMust( + "testdata/instance_template.gompl", + data, + ), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr(resName, "name", data.Name), + resource.TestCheckResourceAttrSet(resName, "id"), + resource.TestCheckResourceAttr(resUserName, "name", userName), + resource.TestCheckResourceAttrSet(resUserName, "id"), + resource.TestCheckResourceAttr(resDbName, "name", dbName), + resource.TestCheckResourceAttr(resDbName, "owner", userName), + resource.TestCheckResourceAttrSet(resDbName, "id"), + ), + }, + }, + }) +} diff --git a/stackit/internal/services/sqlserverflexbeta/testdata/instance_template.gompl b/stackit/internal/services/sqlserverflexbeta/testdata/instance_template.gompl new file mode 100644 index 00000000..ddc95138 --- /dev/null +++ b/stackit/internal/services/sqlserverflexbeta/testdata/instance_template.gompl @@ -0,0 +1,53 @@ +provider "stackitprivatepreview" { + default_region = "{{ .Region }}" + service_account_key_path = "{{ .ServiceAccountFilePath }}" +} + +resource "stackitprivatepreview_sqlserverflexbeta_instance" "{{ .TfName }}" { + project_id = "{{ .ProjectId }}" + name = "{{ .Name }}" + backup_schedule = "{{ .BackupSchedule }}" + retention_days = {{ .RetentionDays }} + flavor_id = "{{ .FlavorId }}" + storage = { + class = "{{ .PerformanceClass }}" + size = {{ .Size }} + } +{{ if .UseEncryption }} + encryption = { + kek_key_id = {{ .KekKeyId }} + kek_key_ring_id = {{ .KekKeyRingId }} + kek_key_version = {{ .KekKeyVersion }} + service_account = "{{ .KekServiceAccount }}" + } +{{ end }} + network = { + acl = ["{{ .AclString }}"] + access_scope = "{{ .AccessScope }}" + } + version = "{{ .Version }}" +} + +{{ if .Users }} +{{ $tfName := .TfName }} +{{ range $user := .Users }} +resource "stackitprivatepreview_sqlserverflexbeta_user" "{{ $user.Name }}" { + project_id = "{{ $user.ProjectId }}" + instance_id = stackitprivatepreview_sqlserverflexbeta_instance.{{ $tfName }}.instance_id + username = "{{ $user.Name }}" + roles = [{{ range $i, $v := $user.Roles }}{{if $i}},{{end}}"{{$v}}"{{end}}] +} +{{ end }} +{{ end }} + +{{ if .Databases }} +{{ $tfName := .TfName }} +{{ range $db := .Databases }} +resource "stackitprivatepreview_sqlserverflexbeta_database" "{{ $db.Name }}" { + project_id = "{{ $db.ProjectId }}" + instance_id = stackitprivatepreview_sqlserverflexbeta_instance.{{ $tfName }}.instance_id + name = "{{ $db.Name }}" + owner = "{{ $db.Owner }}" +} +{{ end }} +{{ end }} diff --git a/stackit/provider.go b/stackit/provider.go index 545d853e..bf9e2b71 100644 --- a/stackit/provider.go +++ b/stackit/provider.go @@ -36,6 +36,7 @@ import ( sqlserverflexBetaDatabase "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/database" sqlserverFlexBetaFlavor "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/flavor" sqlserverflexBetaInstance "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/instance" + sqlserverFlexBetaUser "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/user" ) // Ensure the implementation satisfies the expected interfaces @@ -514,6 +515,7 @@ func (p *Provider) DataSources(_ context.Context) []func() datasource.DataSource sqlserverflexBetaDatabase.NewDatabaseDataSource, sqlserverflexBetaInstance.NewInstanceDataSource, + sqlserverFlexBetaUser.NewUserDataSource, sqlserverFlexBetaFlavor.NewFlavorDataSource, } } @@ -530,6 +532,7 @@ func (p *Provider) Resources(_ context.Context) []func() resource.Resource { sqlserverflexalphaDatabase.NewDatabaseResource, sqlserverflexBetaInstance.NewInstanceResource, + sqlserverFlexBetaUser.NewUserResource, sqlserverflexBetaDatabase.NewDatabaseResource, } return resources From 86fc98461c5e5fed53c687a6983cf176a52162d6 Mon Sep 17 00:00:00 2001 From: "Marcel S. Henselin" Date: Wed, 11 Feb 2026 15:21:57 +0000 Subject: [PATCH 17/41] chore: add sqlserveralpha tests (#49) ## Description relates to #1234 ## Checklist - [ ] Issue was linked above - [ ] Code format was applied: `make fmt` - [ ] Examples were added / adjusted (see `examples/` directory) - [x] Docs are up-to-date: `make generate-docs` (will be checked by CI) - [ ] Unit tests got implemented or updated - [ ] Acceptance tests got implemented or updated (see e.g. [here](https://github.com/stackitcloud/terraform-provider-stackit/blob/f5f99d170996b208672ae684b6da53420e369563/stackit/internal/services/dns/dns_acc_test.go)) - [x] Unit tests are passing: `make test` (will be checked by CI) - [x] No linter issues: `make lint` (will be checked by CI) Reviewed-on: https://tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pulls/49 Co-authored-by: Marcel S. Henselin Co-committed-by: Marcel S. Henselin --- .../build/templates/resource_scaffold.gotmpl | 62 +- .../sqlserverflex_acc_test.go | 715 ++++++------------ .../sqlserverflex_acc_test.go_old.bak | 483 ++++++++++++ .../testdata/instance_template.gompl | 53 ++ .../testdata/resource-max.tf | 52 -- .../testdata/resource-min.tf | 34 - stackit/provider.go | 2 +- 7 files changed, 814 insertions(+), 587 deletions(-) create mode 100644 stackit/internal/services/sqlserverflexalpha/sqlserverflex_acc_test.go_old.bak create mode 100644 stackit/internal/services/sqlserverflexalpha/testdata/instance_template.gompl delete mode 100644 stackit/internal/services/sqlserverflexalpha/testdata/resource-max.tf delete mode 100644 stackit/internal/services/sqlserverflexalpha/testdata/resource-min.tf diff --git a/cmd/cmd/build/templates/resource_scaffold.gotmpl b/cmd/cmd/build/templates/resource_scaffold.gotmpl index e497c8ad..8b612f9c 100644 --- a/cmd/cmd/build/templates/resource_scaffold.gotmpl +++ b/cmd/cmd/build/templates/resource_scaffold.gotmpl @@ -42,14 +42,16 @@ type {{.NameCamel}}Resource struct{ type {{.NamePascal}}ResourceIdentityModel struct { ProjectID types.String `tfsdk:"project_id"` Region types.String `tfsdk:"region"` - {{.NamePascal}}ID types.String `tfsdk:"instance_id"` // TODO: implement further needed parts + {{.NamePascal}}ID types.String `tfsdk:"instance_id"` } +// Metadata defines terraform resource name func (r *{{.NameCamel}}Resource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) { resp.TypeName = req.ProviderTypeName + "_{{.PackageName}}_{{.NameSnake}}" } +// Schema loads the schema from generated files and adds plan modifiers func (r *{{.NameCamel}}Resource) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) { schema = {{.PackageName}}ResGen.{{.NamePascal}}ResourceSchema(ctx) @@ -67,6 +69,7 @@ func (r *{{.NameCamel}}Resource) Schema(ctx context.Context, req resource.Schema resp.Schema = schema } +// IdentitySchema defines the identity schema func (r *instanceResource) IdentitySchema(_ context.Context, _ resource.IdentitySchemaRequest, resp *resource.IdentitySchemaResponse) { resp.IdentitySchema = identityschema.Schema{ Attributes: map[string]identityschema.Attribute{ @@ -83,7 +86,6 @@ func (r *instanceResource) IdentitySchema(_ context.Context, _ resource.Identity } } - // Configure adds the provider configured client to the resource. func (r *{{.NameCamel}}Resource) Configure( ctx context.Context, @@ -152,18 +154,6 @@ func (r *{{.NameCamel}}Resource) ModifyPlan( return } - var identityModel {{.NamePascal}}ResourceIdentityModel - identityModel.ProjectID = planModel.ProjectId - identityModel.Region = planModel.Region - if !planModel.{{.NamePascal}}Id.IsNull() && !planModel.{{.NamePascal}}Id.IsUnknown() { - identityModel.{{.NamePascal}}ID = planModel.{{.NamePascal}}Id - } - - resp.Diagnostics.Append(resp.Identity.Set(ctx, identityModel)...) - if resp.Diagnostics.HasError() { - return - } - resp.Diagnostics.Append(resp.Plan.Set(ctx, planModel)...) if resp.Diagnostics.HasError() { return @@ -184,17 +174,10 @@ func (r *{{.NameCamel}}Resource) Create(ctx context.Context, req resource.Create return } - // Read identity data - var identityData {{.NamePascal}}ResourceIdentityModel - resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...) - if resp.Diagnostics.HasError() { - return - } - ctx = core.InitProviderContext(ctx) - projectId := identityData.ProjectID.ValueString() - region := identityData.Region.ValueString() + projectId := data.ProjectId.ValueString() + region := data.Region.ValueString() ctx = tflog.SetField(ctx, "project_id", projectId) ctx = tflog.SetField(ctx, "region", region) @@ -320,7 +303,7 @@ func (r *{{.NameCamel}}Resource) Read(ctx context.Context, req resource.ReadRequ ctx = tflog.SetField(ctx, "project_id", projectId) ctx = tflog.SetField(ctx, "region", region) - // Todo: Read API call logic + // TODO: Read API call logic // Save updated data into Terraform state resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) @@ -348,22 +331,26 @@ func (r *{{.NameCamel}}Resource) Update(ctx context.Context, req resource.Update return } - // Read identity data - var identityData {{.NamePascal}}ResourceIdentityModel - resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...) - if resp.Diagnostics.HasError() { - return - } - - ctx = core.InitProviderContext(ctx) - projectId := identityData.ProjectID.ValueString() - region := identityData.Region.ValueString() + projectId := data.ProjectId.ValueString() + region := data.Region.ValueString() ctx = tflog.SetField(ctx, "project_id", projectId) ctx = tflog.SetField(ctx, "region", region) - // Todo: Update API call logic + // TODO: Update API call logic + + // TODO: Set data returned by API in identity + identity := {{.NamePascal}}ResourceIdentityModel{ + ProjectID: types.StringValue(projectId), + Region: types.StringValue(region), + // TODO: add missing values + {{.NamePascal}}ID: types.StringValue({{.NamePascal}}Id), + } + resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...) + if resp.Diagnostics.HasError() { + return + } // Save updated data into Terraform state resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) @@ -395,7 +382,7 @@ func (r *{{.NameCamel}}Resource) Delete(ctx context.Context, req resource.Delete ctx = tflog.SetField(ctx, "project_id", projectId) ctx = tflog.SetField(ctx, "region", region) - // Todo: Delete API call logic + // TODO: Delete API call logic tflog.Info(ctx, "{{.PackageName}}.{{.NamePascal}} deleted") } @@ -409,7 +396,8 @@ func (r *{{.NameCamel}}Resource) ImportState( ) { idParts := strings.Split(req.ID, core.Separator) - // Todo: Import logic + // TODO: Import logic + // TODO: fix len and parts itself if len(idParts) < 2 || idParts[0] == "" || idParts[1] == "" { core.LogAndAddError( ctx, &resp.Diagnostics, diff --git a/stackit/internal/services/sqlserverflexalpha/sqlserverflex_acc_test.go b/stackit/internal/services/sqlserverflexalpha/sqlserverflex_acc_test.go index 5138deee..6c2915ea 100644 --- a/stackit/internal/services/sqlserverflexalpha/sqlserverflex_acc_test.go +++ b/stackit/internal/services/sqlserverflexalpha/sqlserverflex_acc_test.go @@ -1,483 +1,272 @@ -// Copyright (c) STACKIT - package sqlserverflexalpha_test import ( "context" _ "embed" "fmt" - "maps" - "strings" + "os" + "strconv" "testing" - "github.com/hashicorp/terraform-plugin-testing/config" "github.com/hashicorp/terraform-plugin-testing/helper/acctest" "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-plugin-testing/terraform" - coreconfig "github.com/stackitcloud/stackit-sdk-go/core/config" - "github.com/stackitcloud/stackit-sdk-go/core/utils" - "github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex" - "github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex/wait" + sqlserverflexalpha "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/instance" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/internal/testutils" - "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" + // The fwresource import alias is so there is no collision + // with the more typical acceptance testing import: + // "github.com/hashicorp/terraform-plugin-testing/helper/resource" + fwresource "github.com/hashicorp/terraform-plugin-framework/resource" ) -var ( - //go:embed testdata/resource-max.tf - resourceMaxConfig string - //go:embed testdata/resource-min.tf - resourceMinConfig string -) +func TestInstanceResourceSchema(t *testing.T) { + t.Parallel() -var testConfigVarsMin = config.Variables{ - "project_id": config.StringVariable(testutils.ProjectId), - "name": config.StringVariable(fmt.Sprintf("tf-acc-%s", acctest.RandStringFromCharSet(7, acctest.CharSetAlphaNum))), - "flavor_cpu": config.IntegerVariable(4), - "flavor_ram": config.IntegerVariable(16), - "flavor_description": config.StringVariable("SQLServer-Flex-4.16-Standard-EU01"), - "replicas": config.IntegerVariable(1), - "flavor_id": config.StringVariable("4.16-Single"), - "username": config.StringVariable(fmt.Sprintf("tf-acc-user-%s", acctest.RandStringFromCharSet(7, acctest.CharSetAlpha))), - "role": config.StringVariable("##STACKIT_LoginManager##"), -} - -var testConfigVarsMax = config.Variables{ - "project_id": config.StringVariable(testutils.ProjectId), - "name": config.StringVariable(fmt.Sprintf("tf-acc-%s", acctest.RandStringFromCharSet(7, acctest.CharSetAlphaNum))), - "acl1": config.StringVariable("192.168.0.0/16"), - "flavor_cpu": config.IntegerVariable(4), - "flavor_ram": config.IntegerVariable(16), - "flavor_description": config.StringVariable("SQLServer-Flex-4.16-Standard-EU01"), - "storage_class": config.StringVariable("premium-perf2-stackit"), - "storage_size": config.IntegerVariable(40), - "server_version": config.StringVariable("2022"), - "replicas": config.IntegerVariable(1), - "options_retention_days": config.IntegerVariable(64), - "flavor_id": config.StringVariable("4.16-Single"), - "backup_schedule": config.StringVariable("00 6 * * *"), - "username": config.StringVariable(fmt.Sprintf("tf-acc-user-%s", acctest.RandStringFromCharSet(7, acctest.CharSetAlpha))), - "role": config.StringVariable("##STACKIT_LoginManager##"), - "region": config.StringVariable(testutils.Region), -} - -func configVarsMinUpdated() config.Variables { - temp := maps.Clone(testConfigVarsMax) - temp["name"] = config.StringVariable(testutils.ConvertConfigVariable(temp["name"]) + "changed") - return temp -} - -func configVarsMaxUpdated() config.Variables { - temp := maps.Clone(testConfigVarsMax) - temp["backup_schedule"] = config.StringVariable("00 12 * * *") - return temp -} - -func TestAccSQLServerFlexMinResource(t *testing.T) { - resource.Test(t, resource.TestCase{ - ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories, - CheckDestroy: testAccChecksqlserverflexDestroy, - Steps: []resource.TestStep{ - // Creation - { - Config: testutils.SQLServerFlexProviderConfig("") + "\n" + resourceMinConfig, - ConfigVariables: testConfigVarsMin, - Check: resource.ComposeAggregateTestCheckFunc( - // Instance - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "project_id", testutils.ConvertConfigVariable(testConfigVarsMin["project_id"])), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "instance_id"), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "name", testutils.ConvertConfigVariable(testConfigVarsMin["name"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "acl.#", "1"), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "flavor.id"), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.description", testutils.ConvertConfigVariable(testConfigVarsMin["flavor_description"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "replicas", testutils.ConvertConfigVariable(testConfigVarsMin["replicas"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.cpu", testutils.ConvertConfigVariable(testConfigVarsMin["flavor_cpu"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.ram", testutils.ConvertConfigVariable(testConfigVarsMin["flavor_ram"])), - // User - resource.TestCheckResourceAttrPair( - "stackit_sqlserverflex_user.user", "project_id", - "stackit_sqlserverflex_instance.instance", "project_id", - ), - resource.TestCheckResourceAttrPair( - "stackit_sqlserverflex_user.user", "instance_id", - "stackit_sqlserverflex_instance.instance", "instance_id", - ), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_user.user", "user_id"), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_user.user", "password"), - ), - }, - // Update - { - Config: testutils.SQLServerFlexProviderConfig("") + "\n" + resourceMinConfig, - ConfigVariables: testConfigVarsMin, - Check: resource.ComposeAggregateTestCheckFunc( - // Instance - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "project_id", testutils.ConvertConfigVariable(testConfigVarsMin["project_id"])), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "instance_id"), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "name", testutils.ConvertConfigVariable(testConfigVarsMin["name"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "acl.#", "1"), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "flavor.id"), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.description", testutils.ConvertConfigVariable(testConfigVarsMin["flavor_description"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.cpu", testutils.ConvertConfigVariable(testConfigVarsMin["flavor_cpu"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.ram", testutils.ConvertConfigVariable(testConfigVarsMin["flavor_ram"])), - // User - resource.TestCheckResourceAttrPair( - "stackit_sqlserverflex_user.user", "project_id", - "stackit_sqlserverflex_instance.instance", "project_id", - ), - resource.TestCheckResourceAttrPair( - "stackit_sqlserverflex_user.user", "instance_id", - "stackit_sqlserverflex_instance.instance", "instance_id", - ), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_user.user", "user_id"), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_user.user", "password"), - ), - }, - // data source - { - Config: testutils.SQLServerFlexProviderConfig("") + "\n" + resourceMinConfig, - ConfigVariables: testConfigVarsMin, - Check: resource.ComposeAggregateTestCheckFunc( - // Instance data - resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "project_id", testutils.ConvertConfigVariable(testConfigVarsMin["project_id"])), - resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "name", testutils.ConvertConfigVariable(testConfigVarsMin["name"])), - resource.TestCheckResourceAttrPair( - "data.stackit_sqlserverflex_instance.instance", "project_id", - "stackit_sqlserverflex_instance.instance", "project_id", - ), - resource.TestCheckResourceAttrPair( - "data.stackit_sqlserverflex_instance.instance", "instance_id", - "stackit_sqlserverflex_instance.instance", "instance_id", - ), - resource.TestCheckResourceAttrPair( - "data.stackit_sqlserverflex_user.user", "instance_id", - "stackit_sqlserverflex_user.user", "instance_id", - ), - - resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "acl.#", "1"), - resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "flavor.id", testutils.ConvertConfigVariable(testConfigVarsMin["flavor_id"])), - resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "flavor.description", testutils.ConvertConfigVariable(testConfigVarsMin["flavor_description"])), - resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "flavor.cpu", testutils.ConvertConfigVariable(testConfigVarsMin["flavor_cpu"])), - resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "flavor.ram", testutils.ConvertConfigVariable(testConfigVarsMin["flavor_ram"])), - - // User data - resource.TestCheckResourceAttr("data.stackit_sqlserverflex_user.user", "project_id", testutils.ConvertConfigVariable(testConfigVarsMin["project_id"])), - resource.TestCheckResourceAttrSet("data.stackit_sqlserverflex_user.user", "user_id"), - resource.TestCheckResourceAttr("data.stackit_sqlserverflex_user.user", "username", testutils.ConvertConfigVariable(testConfigVarsMin["username"])), - resource.TestCheckResourceAttr("data.stackit_sqlserverflex_user.user", "roles.#", "1"), - resource.TestCheckResourceAttr("data.stackit_sqlserverflex_user.user", "roles.0", testutils.ConvertConfigVariable(testConfigVarsMax["role"])), - ), - }, - // Import - { - ConfigVariables: testConfigVarsMin, - ResourceName: "stackit_sqlserverflex_instance.instance", - ImportStateIdFunc: func(s *terraform.State) (string, error) { - r, ok := s.RootModule().Resources["stackit_sqlserverflex_instance.instance"] - if !ok { - return "", fmt.Errorf("couldn't find resource stackit_sqlserverflex_instance.instance") - } - instanceId, ok := r.Primary.Attributes["instance_id"] - if !ok { - return "", fmt.Errorf("couldn't find attribute instance_id") - } - - return fmt.Sprintf("%s,%s,%s", testutils.ProjectId, testutils.Region, instanceId), nil - }, - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"backup_schedule"}, - ImportStateCheck: func(s []*terraform.InstanceState) error { - if len(s) != 1 { - return fmt.Errorf("expected 1 state, got %d", len(s)) - } - return nil - }, - }, - { - ResourceName: "stackit_sqlserverflex_user.user", - ConfigVariables: testConfigVarsMin, - ImportStateIdFunc: func(s *terraform.State) (string, error) { - r, ok := s.RootModule().Resources["stackit_sqlserverflex_user.user"] - if !ok { - return "", fmt.Errorf("couldn't find resource stackit_sqlserverflex_user.user") - } - instanceId, ok := r.Primary.Attributes["instance_id"] - if !ok { - return "", fmt.Errorf("couldn't find attribute instance_id") - } - userId, ok := r.Primary.Attributes["user_id"] - if !ok { - return "", fmt.Errorf("couldn't find attribute user_id") - } - - return fmt.Sprintf("%s,%s,%s,%s", testutils.ProjectId, testutils.Region, instanceId, userId), nil - }, - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"password"}, - }, - // Update - { - Config: testutils.SQLServerFlexProviderConfig("") + "\n" + resourceMinConfig, - ConfigVariables: configVarsMinUpdated(), - Check: resource.ComposeAggregateTestCheckFunc( - // Instance data - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "project_id", testutils.ConvertConfigVariable(configVarsMinUpdated()["project_id"])), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "instance_id"), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "name", testutils.ConvertConfigVariable(configVarsMinUpdated()["name"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "acl.#", "1"), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "flavor.id"), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "flavor.description"), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.cpu", testutils.ConvertConfigVariable(configVarsMinUpdated()["flavor_cpu"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.ram", testutils.ConvertConfigVariable(configVarsMinUpdated()["flavor_ram"])), - ), - }, - // Deletion is done by the framework implicitly - }, - }) -} - -func TestAccSQLServerFlexMaxResource(t *testing.T) { - resource.Test(t, resource.TestCase{ - ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories, - CheckDestroy: testAccChecksqlserverflexDestroy, - Steps: []resource.TestStep{ - // Creation - { - Config: testutils.SQLServerFlexProviderConfig("") + "\n" + resourceMaxConfig, - ConfigVariables: testConfigVarsMax, - Check: resource.ComposeAggregateTestCheckFunc( - // Instance - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "project_id", testutils.ConvertConfigVariable(testConfigVarsMax["project_id"])), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "instance_id"), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "name", testutils.ConvertConfigVariable(testConfigVarsMax["name"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "acl.#", "1"), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "acl.0", testutils.ConvertConfigVariable(testConfigVarsMax["acl1"])), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "flavor.id"), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.description", testutils.ConvertConfigVariable(testConfigVarsMax["flavor_description"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "replicas", testutils.ConvertConfigVariable(testConfigVarsMax["replicas"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.cpu", testutils.ConvertConfigVariable(testConfigVarsMax["flavor_cpu"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.ram", testutils.ConvertConfigVariable(testConfigVarsMax["flavor_ram"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "storage.class", testutils.ConvertConfigVariable(testConfigVarsMax["storage_class"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "storage.size", testutils.ConvertConfigVariable(testConfigVarsMax["storage_size"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "version", testutils.ConvertConfigVariable(testConfigVarsMax["server_version"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "options.retention_days", testutils.ConvertConfigVariable(testConfigVarsMax["options_retention_days"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "backup_schedule", testutils.ConvertConfigVariable(testConfigVarsMax["backup_schedule"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "region", testutils.Region), - // User - resource.TestCheckResourceAttrPair( - "stackit_sqlserverflex_user.user", "project_id", - "stackit_sqlserverflex_instance.instance", "project_id", - ), - resource.TestCheckResourceAttrPair( - "stackit_sqlserverflex_user.user", "instance_id", - "stackit_sqlserverflex_instance.instance", "instance_id", - ), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_user.user", "user_id"), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_user.user", "password"), - ), - }, - // Update - { - Config: testutils.SQLServerFlexProviderConfig("") + "\n" + resourceMaxConfig, - ConfigVariables: testConfigVarsMax, - Check: resource.ComposeAggregateTestCheckFunc( - // Instance - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "project_id", testutils.ConvertConfigVariable(testConfigVarsMax["project_id"])), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "instance_id"), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "name", testutils.ConvertConfigVariable(testConfigVarsMax["name"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "acl.#", "1"), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "acl.0", testutils.ConvertConfigVariable(testConfigVarsMax["acl1"])), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "flavor.id"), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.description", testutils.ConvertConfigVariable(testConfigVarsMax["flavor_description"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "replicas", testutils.ConvertConfigVariable(testConfigVarsMax["replicas"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.cpu", testutils.ConvertConfigVariable(testConfigVarsMax["flavor_cpu"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.ram", testutils.ConvertConfigVariable(testConfigVarsMax["flavor_ram"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "storage.class", testutils.ConvertConfigVariable(testConfigVarsMax["storage_class"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "storage.size", testutils.ConvertConfigVariable(testConfigVarsMax["storage_size"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "version", testutils.ConvertConfigVariable(testConfigVarsMax["server_version"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "options.retention_days", testutils.ConvertConfigVariable(testConfigVarsMax["options_retention_days"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "backup_schedule", testutils.ConvertConfigVariable(testConfigVarsMax["backup_schedule"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "region", testutils.Region), - // User - resource.TestCheckResourceAttrPair( - "stackit_sqlserverflex_user.user", "project_id", - "stackit_sqlserverflex_instance.instance", "project_id", - ), - resource.TestCheckResourceAttrPair( - "stackit_sqlserverflex_user.user", "instance_id", - "stackit_sqlserverflex_instance.instance", "instance_id", - ), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_user.user", "user_id"), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_user.user", "password"), - ), - }, - // data source - { - Config: testutils.SQLServerFlexProviderConfig("") + "\n" + resourceMaxConfig, - ConfigVariables: testConfigVarsMax, - Check: resource.ComposeAggregateTestCheckFunc( - // Instance data - resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "project_id", testutils.ConvertConfigVariable(testConfigVarsMax["project_id"])), - resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "name", testutils.ConvertConfigVariable(testConfigVarsMax["name"])), - resource.TestCheckResourceAttrPair( - "data.stackit_sqlserverflex_instance.instance", "project_id", - "stackit_sqlserverflex_instance.instance", "project_id", - ), - resource.TestCheckResourceAttrPair( - "data.stackit_sqlserverflex_instance.instance", "instance_id", - "stackit_sqlserverflex_instance.instance", "instance_id", - ), - resource.TestCheckResourceAttrPair( - "data.stackit_sqlserverflex_user.user", "instance_id", - "stackit_sqlserverflex_user.user", "instance_id", - ), - - resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "acl.#", "1"), - resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "acl.0", testutils.ConvertConfigVariable(testConfigVarsMax["acl1"])), - resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "flavor.id", testutils.ConvertConfigVariable(testConfigVarsMax["flavor_id"])), - resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "flavor.description", testutils.ConvertConfigVariable(testConfigVarsMax["flavor_description"])), - resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "flavor.cpu", testutils.ConvertConfigVariable(testConfigVarsMax["flavor_cpu"])), - resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "flavor.ram", testutils.ConvertConfigVariable(testConfigVarsMax["flavor_ram"])), - resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "replicas", testutils.ConvertConfigVariable(testConfigVarsMax["replicas"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "options.retention_days", testutils.ConvertConfigVariable(testConfigVarsMax["options_retention_days"])), - resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "backup_schedule", testutils.ConvertConfigVariable(testConfigVarsMax["backup_schedule"])), - - // User data - resource.TestCheckResourceAttr("data.stackit_sqlserverflex_user.user", "project_id", testutils.ConvertConfigVariable(testConfigVarsMax["project_id"])), - resource.TestCheckResourceAttrSet("data.stackit_sqlserverflex_user.user", "user_id"), - resource.TestCheckResourceAttr("data.stackit_sqlserverflex_user.user", "username", testutils.ConvertConfigVariable(testConfigVarsMax["username"])), - resource.TestCheckResourceAttr("data.stackit_sqlserverflex_user.user", "roles.#", "1"), - resource.TestCheckResourceAttr("data.stackit_sqlserverflex_user.user", "roles.0", testutils.ConvertConfigVariable(testConfigVarsMax["role"])), - resource.TestCheckResourceAttrSet("data.stackit_sqlserverflex_user.user", "host"), - resource.TestCheckResourceAttrSet("data.stackit_sqlserverflex_user.user", "port"), - ), - }, - // Import - { - ConfigVariables: testConfigVarsMax, - ResourceName: "stackit_sqlserverflex_instance.instance", - ImportStateIdFunc: func(s *terraform.State) (string, error) { - r, ok := s.RootModule().Resources["stackit_sqlserverflex_instance.instance"] - if !ok { - return "", fmt.Errorf("couldn't find resource stackit_sqlserverflex_instance.instance") - } - instanceId, ok := r.Primary.Attributes["instance_id"] - if !ok { - return "", fmt.Errorf("couldn't find attribute instance_id") - } - - return fmt.Sprintf("%s,%s,%s", testutils.ProjectId, testutils.Region, instanceId), nil - }, - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"backup_schedule"}, - ImportStateCheck: func(s []*terraform.InstanceState) error { - if len(s) != 1 { - return fmt.Errorf("expected 1 state, got %d", len(s)) - } - if s[0].Attributes["backup_schedule"] != testutils.ConvertConfigVariable(testConfigVarsMax["backup_schedule"]) { - return fmt.Errorf("expected backup_schedule %s, got %s", testConfigVarsMax["backup_schedule"], s[0].Attributes["backup_schedule"]) - } - return nil - }, - }, - { - ResourceName: "stackit_sqlserverflex_user.user", - ConfigVariables: testConfigVarsMax, - ImportStateIdFunc: func(s *terraform.State) (string, error) { - r, ok := s.RootModule().Resources["stackit_sqlserverflex_user.user"] - if !ok { - return "", fmt.Errorf("couldn't find resource stackit_sqlserverflex_user.user") - } - instanceId, ok := r.Primary.Attributes["instance_id"] - if !ok { - return "", fmt.Errorf("couldn't find attribute instance_id") - } - userId, ok := r.Primary.Attributes["user_id"] - if !ok { - return "", fmt.Errorf("couldn't find attribute user_id") - } - - return fmt.Sprintf("%s,%s,%s,%s", testutils.ProjectId, testutils.Region, instanceId, userId), nil - }, - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"password"}, - }, - // Update - { - Config: testutils.SQLServerFlexProviderConfig("") + "\n" + resourceMaxConfig, - ConfigVariables: configVarsMaxUpdated(), - Check: resource.ComposeAggregateTestCheckFunc( - // Instance data - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "project_id", testutils.ConvertConfigVariable(configVarsMaxUpdated()["project_id"])), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "instance_id"), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "name", testutils.ConvertConfigVariable(configVarsMaxUpdated()["name"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "acl.#", "1"), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "acl.0", testutils.ConvertConfigVariable(configVarsMaxUpdated()["acl1"])), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "flavor.id"), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "flavor.description"), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.cpu", testutils.ConvertConfigVariable(configVarsMaxUpdated()["flavor_cpu"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.ram", testutils.ConvertConfigVariable(configVarsMaxUpdated()["flavor_ram"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "replicas", testutils.ConvertConfigVariable(configVarsMaxUpdated()["replicas"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "storage.class", testutils.ConvertConfigVariable(configVarsMaxUpdated()["storage_class"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "storage.size", testutils.ConvertConfigVariable(configVarsMaxUpdated()["storage_size"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "version", testutils.ConvertConfigVariable(configVarsMaxUpdated()["server_version"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "options.retention_days", testutils.ConvertConfigVariable(configVarsMaxUpdated()["options_retention_days"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "backup_schedule", testutils.ConvertConfigVariable(configVarsMaxUpdated()["backup_schedule"])), - ), - }, - // Deletion is done by the framework implicitly - }, - }) -} - -func testAccChecksqlserverflexDestroy(s *terraform.State) error { ctx := context.Background() - var client *sqlserverflex.APIClient - var err error - if testutils.SQLServerFlexCustomEndpoint == "" { - client, err = sqlserverflex.NewAPIClient() - } else { - client, err = sqlserverflex.NewAPIClient( - coreconfig.WithEndpoint(testutils.SQLServerFlexCustomEndpoint), - ) - } - if err != nil { - return fmt.Errorf("creating client: %w", err) + schemaRequest := fwresource.SchemaRequest{} + schemaResponse := &fwresource.SchemaResponse{} + + // Instantiate the resource.Resource and call its Schema method + sqlserverflexalpha.NewInstanceResource().Schema(ctx, schemaRequest, schemaResponse) + + if schemaResponse.Diagnostics.HasError() { + t.Fatalf("Schema method diagnostics: %+v", schemaResponse.Diagnostics) } - instancesToDestroy := []string{} - for _, rs := range s.RootModule().Resources { - if rs.Type != "stackit_sqlserverflex_instance" { - continue - } - // instance terraform ID: = "[project_id],[region],[instance_id]" - instanceId := strings.Split(rs.Primary.ID, core.Separator)[2] - instancesToDestroy = append(instancesToDestroy, instanceId) - } + // Validate the schema + diagnostics := schemaResponse.Schema.ValidateImplementation(ctx) - instancesResp, err := client.ListInstances(ctx, testutils.ProjectId, testutils.Region).Execute() - if err != nil { - return fmt.Errorf("getting instancesResp: %w", err) + if diagnostics.HasError() { + t.Fatalf("Schema validation diagnostics: %+v", diagnostics) } - - items := *instancesResp.Items - for i := range items { - if items[i].Id == nil { - continue - } - if utils.Contains(instancesToDestroy, *items[i].Id) { - err := client.DeleteInstanceExecute(ctx, testutils.ProjectId, *items[i].Id, testutils.Region) - if err != nil { - return fmt.Errorf("destroying instance %s during CheckDestroy: %w", *items[i].Id, err) - } - _, err = wait.DeleteInstanceWaitHandler(ctx, client, testutils.ProjectId, *items[i].Id, testutils.Region).WaitWithContext(ctx) - if err != nil { - return fmt.Errorf("destroying instance %s during CheckDestroy: waiting for deletion %w", *items[i].Id, err) - } - } - } - return nil +} + +func TestMain(m *testing.M) { + testutils.Setup() + code := m.Run() + // shutdown() + os.Exit(code) +} + +func testAccPreCheck(t *testing.T) { + if _, ok := os.LookupEnv("TF_ACC_PROJECT_ID"); !ok { + t.Fatalf("could not find env var TF_ACC_PROJECT_ID") + } +} + +type resData struct { + ServiceAccountFilePath string + ProjectId string + Region string + Name string + TfName string + FlavorId string + BackupSchedule string + UseEncryption bool + KekKeyId string + KekKeyRingId string + KekKeyVersion uint8 + KekServiceAccount string + PerformanceClass string + Size uint32 + AclString string + AccessScope string + RetentionDays uint32 + Version string + Users []User + Databases []Database +} + +type User struct { + Name string + ProjectId string + Roles []string +} + +type Database struct { + Name string + ProjectId string + Owner string +} + +func getExample() resData { + name := acctest.RandomWithPrefix("tf-acc") + return resData{ + Region: os.Getenv("TF_ACC_REGION"), + ServiceAccountFilePath: os.Getenv("TF_ACC_SERVICE_ACCOUNT_FILE"), + ProjectId: os.Getenv("TF_ACC_PROJECT_ID"), + Name: name, + TfName: name, + FlavorId: "4.16-Single", + BackupSchedule: "0 0 * * *", + UseEncryption: false, + RetentionDays: 33, + PerformanceClass: "premium-perf2-stackit", + Size: 10, + AclString: "0.0.0.0/0", + AccessScope: "PUBLIC", + Version: "2022", + } +} + +func TestAccInstance(t *testing.T) { + exData := getExample() + resName := fmt.Sprintf( + "stackitprivatepreview_sqlserverflexalpha_instance.%s", + exData.TfName, + ) + + updNameData := exData + updNameData.Name = "name-updated" + + updSizeData := exData + updSizeData.Size = 25 + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories, + Steps: []resource.TestStep{ + // Create and verify + { + Config: testutils.StringFromTemplateMust( + "testdata/instance_template.gompl", + exData, + ), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr(resName, "name", exData.Name), + resource.TestCheckResourceAttrSet(resName, "id"), + ), + }, + // Update name and verify + { + Config: testutils.StringFromTemplateMust( + "testdata/instance_template.gompl", + updNameData, + ), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr(resName, "name", updNameData.Name), + ), + }, + // Update size and verify + { + Config: testutils.StringFromTemplateMust( + "testdata/instance_template.gompl", + updSizeData, + ), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + resName, + "storage.size", + strconv.Itoa(int(updSizeData.Size)), + ), + ), + }, + //// Import test + //{ + // ResourceName: "example_resource.test", + // ImportState: true, + // ImportStateVerify: true, + //}, + }, + }) +} + +func TestAccInstanceWithUsers(t *testing.T) { + data := getExample() + userName := "testUser" + data.Users = []User{ + { + Name: userName, + ProjectId: os.Getenv("TF_ACC_PROJECT_ID"), + Roles: []string{"##STACKIT_LoginManager##", "##STACKIT_DatabaseManager##"}, + }, + } + + resName := fmt.Sprintf( + "stackitprivatepreview_sqlserverflexalpha_instance.%s", + data.TfName, + ) + + resUserName := fmt.Sprintf( + "stackitprivatepreview_sqlserverflexalpha_user.%s", + userName, + ) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories, + Steps: []resource.TestStep{ + // Create and verify + { + Config: testutils.StringFromTemplateMust( + "testdata/instance_template.gompl", + data, + ), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr(resName, "name", data.Name), + resource.TestCheckResourceAttrSet(resName, "id"), + resource.TestCheckResourceAttr(resUserName, "name", userName), + resource.TestCheckResourceAttrSet(resUserName, "id"), + ), + }, + }, + }) +} + +func TestAccInstanceWithDatabases(t *testing.T) { + data := getExample() + dbName := "testDb" + userName := "testUser" + data.Users = []User{ + { + Name: userName, + ProjectId: os.Getenv("TF_ACC_PROJECT_ID"), + Roles: []string{"##STACKIT_LoginManager##", "##STACKIT_DatabaseManager##"}, + }, + } + data.Databases = []Database{ + { + Name: dbName, + ProjectId: os.Getenv("TF_ACC_PROJECT_ID"), + Owner: userName, + }, + } + + resName := fmt.Sprintf( + "stackitprivatepreview_sqlserverflexalpha_instance.%s", + data.TfName, + ) + + resUserName := fmt.Sprintf( + "stackitprivatepreview_sqlserverflexalpha_user.%s", + userName, + ) + + resDbName := fmt.Sprintf( + "stackitprivatepreview_sqlserverflexalpha_database.%s", + dbName, + ) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories, + Steps: []resource.TestStep{ + // Create and verify + { + Config: testutils.StringFromTemplateMust( + "testdata/instance_template.gompl", + data, + ), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr(resName, "name", data.Name), + resource.TestCheckResourceAttrSet(resName, "id"), + resource.TestCheckResourceAttr(resUserName, "name", userName), + resource.TestCheckResourceAttrSet(resUserName, "id"), + resource.TestCheckResourceAttr(resDbName, "name", dbName), + resource.TestCheckResourceAttr(resDbName, "owner", userName), + resource.TestCheckResourceAttrSet(resDbName, "id"), + ), + }, + }, + }) } diff --git a/stackit/internal/services/sqlserverflexalpha/sqlserverflex_acc_test.go_old.bak b/stackit/internal/services/sqlserverflexalpha/sqlserverflex_acc_test.go_old.bak new file mode 100644 index 00000000..5138deee --- /dev/null +++ b/stackit/internal/services/sqlserverflexalpha/sqlserverflex_acc_test.go_old.bak @@ -0,0 +1,483 @@ +// Copyright (c) STACKIT + +package sqlserverflexalpha_test + +import ( + "context" + _ "embed" + "fmt" + "maps" + "strings" + "testing" + + "github.com/hashicorp/terraform-plugin-testing/config" + "github.com/hashicorp/terraform-plugin-testing/helper/acctest" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/terraform" + coreconfig "github.com/stackitcloud/stackit-sdk-go/core/config" + "github.com/stackitcloud/stackit-sdk-go/core/utils" + "github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex" + "github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex/wait" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/internal/testutils" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" +) + +var ( + //go:embed testdata/resource-max.tf + resourceMaxConfig string + //go:embed testdata/resource-min.tf + resourceMinConfig string +) + +var testConfigVarsMin = config.Variables{ + "project_id": config.StringVariable(testutils.ProjectId), + "name": config.StringVariable(fmt.Sprintf("tf-acc-%s", acctest.RandStringFromCharSet(7, acctest.CharSetAlphaNum))), + "flavor_cpu": config.IntegerVariable(4), + "flavor_ram": config.IntegerVariable(16), + "flavor_description": config.StringVariable("SQLServer-Flex-4.16-Standard-EU01"), + "replicas": config.IntegerVariable(1), + "flavor_id": config.StringVariable("4.16-Single"), + "username": config.StringVariable(fmt.Sprintf("tf-acc-user-%s", acctest.RandStringFromCharSet(7, acctest.CharSetAlpha))), + "role": config.StringVariable("##STACKIT_LoginManager##"), +} + +var testConfigVarsMax = config.Variables{ + "project_id": config.StringVariable(testutils.ProjectId), + "name": config.StringVariable(fmt.Sprintf("tf-acc-%s", acctest.RandStringFromCharSet(7, acctest.CharSetAlphaNum))), + "acl1": config.StringVariable("192.168.0.0/16"), + "flavor_cpu": config.IntegerVariable(4), + "flavor_ram": config.IntegerVariable(16), + "flavor_description": config.StringVariable("SQLServer-Flex-4.16-Standard-EU01"), + "storage_class": config.StringVariable("premium-perf2-stackit"), + "storage_size": config.IntegerVariable(40), + "server_version": config.StringVariable("2022"), + "replicas": config.IntegerVariable(1), + "options_retention_days": config.IntegerVariable(64), + "flavor_id": config.StringVariable("4.16-Single"), + "backup_schedule": config.StringVariable("00 6 * * *"), + "username": config.StringVariable(fmt.Sprintf("tf-acc-user-%s", acctest.RandStringFromCharSet(7, acctest.CharSetAlpha))), + "role": config.StringVariable("##STACKIT_LoginManager##"), + "region": config.StringVariable(testutils.Region), +} + +func configVarsMinUpdated() config.Variables { + temp := maps.Clone(testConfigVarsMax) + temp["name"] = config.StringVariable(testutils.ConvertConfigVariable(temp["name"]) + "changed") + return temp +} + +func configVarsMaxUpdated() config.Variables { + temp := maps.Clone(testConfigVarsMax) + temp["backup_schedule"] = config.StringVariable("00 12 * * *") + return temp +} + +func TestAccSQLServerFlexMinResource(t *testing.T) { + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories, + CheckDestroy: testAccChecksqlserverflexDestroy, + Steps: []resource.TestStep{ + // Creation + { + Config: testutils.SQLServerFlexProviderConfig("") + "\n" + resourceMinConfig, + ConfigVariables: testConfigVarsMin, + Check: resource.ComposeAggregateTestCheckFunc( + // Instance + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "project_id", testutils.ConvertConfigVariable(testConfigVarsMin["project_id"])), + resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "instance_id"), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "name", testutils.ConvertConfigVariable(testConfigVarsMin["name"])), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "acl.#", "1"), + resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "flavor.id"), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.description", testutils.ConvertConfigVariable(testConfigVarsMin["flavor_description"])), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "replicas", testutils.ConvertConfigVariable(testConfigVarsMin["replicas"])), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.cpu", testutils.ConvertConfigVariable(testConfigVarsMin["flavor_cpu"])), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.ram", testutils.ConvertConfigVariable(testConfigVarsMin["flavor_ram"])), + // User + resource.TestCheckResourceAttrPair( + "stackit_sqlserverflex_user.user", "project_id", + "stackit_sqlserverflex_instance.instance", "project_id", + ), + resource.TestCheckResourceAttrPair( + "stackit_sqlserverflex_user.user", "instance_id", + "stackit_sqlserverflex_instance.instance", "instance_id", + ), + resource.TestCheckResourceAttrSet("stackit_sqlserverflex_user.user", "user_id"), + resource.TestCheckResourceAttrSet("stackit_sqlserverflex_user.user", "password"), + ), + }, + // Update + { + Config: testutils.SQLServerFlexProviderConfig("") + "\n" + resourceMinConfig, + ConfigVariables: testConfigVarsMin, + Check: resource.ComposeAggregateTestCheckFunc( + // Instance + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "project_id", testutils.ConvertConfigVariable(testConfigVarsMin["project_id"])), + resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "instance_id"), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "name", testutils.ConvertConfigVariable(testConfigVarsMin["name"])), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "acl.#", "1"), + resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "flavor.id"), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.description", testutils.ConvertConfigVariable(testConfigVarsMin["flavor_description"])), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.cpu", testutils.ConvertConfigVariable(testConfigVarsMin["flavor_cpu"])), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.ram", testutils.ConvertConfigVariable(testConfigVarsMin["flavor_ram"])), + // User + resource.TestCheckResourceAttrPair( + "stackit_sqlserverflex_user.user", "project_id", + "stackit_sqlserverflex_instance.instance", "project_id", + ), + resource.TestCheckResourceAttrPair( + "stackit_sqlserverflex_user.user", "instance_id", + "stackit_sqlserverflex_instance.instance", "instance_id", + ), + resource.TestCheckResourceAttrSet("stackit_sqlserverflex_user.user", "user_id"), + resource.TestCheckResourceAttrSet("stackit_sqlserverflex_user.user", "password"), + ), + }, + // data source + { + Config: testutils.SQLServerFlexProviderConfig("") + "\n" + resourceMinConfig, + ConfigVariables: testConfigVarsMin, + Check: resource.ComposeAggregateTestCheckFunc( + // Instance data + resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "project_id", testutils.ConvertConfigVariable(testConfigVarsMin["project_id"])), + resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "name", testutils.ConvertConfigVariable(testConfigVarsMin["name"])), + resource.TestCheckResourceAttrPair( + "data.stackit_sqlserverflex_instance.instance", "project_id", + "stackit_sqlserverflex_instance.instance", "project_id", + ), + resource.TestCheckResourceAttrPair( + "data.stackit_sqlserverflex_instance.instance", "instance_id", + "stackit_sqlserverflex_instance.instance", "instance_id", + ), + resource.TestCheckResourceAttrPair( + "data.stackit_sqlserverflex_user.user", "instance_id", + "stackit_sqlserverflex_user.user", "instance_id", + ), + + resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "acl.#", "1"), + resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "flavor.id", testutils.ConvertConfigVariable(testConfigVarsMin["flavor_id"])), + resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "flavor.description", testutils.ConvertConfigVariable(testConfigVarsMin["flavor_description"])), + resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "flavor.cpu", testutils.ConvertConfigVariable(testConfigVarsMin["flavor_cpu"])), + resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "flavor.ram", testutils.ConvertConfigVariable(testConfigVarsMin["flavor_ram"])), + + // User data + resource.TestCheckResourceAttr("data.stackit_sqlserverflex_user.user", "project_id", testutils.ConvertConfigVariable(testConfigVarsMin["project_id"])), + resource.TestCheckResourceAttrSet("data.stackit_sqlserverflex_user.user", "user_id"), + resource.TestCheckResourceAttr("data.stackit_sqlserverflex_user.user", "username", testutils.ConvertConfigVariable(testConfigVarsMin["username"])), + resource.TestCheckResourceAttr("data.stackit_sqlserverflex_user.user", "roles.#", "1"), + resource.TestCheckResourceAttr("data.stackit_sqlserverflex_user.user", "roles.0", testutils.ConvertConfigVariable(testConfigVarsMax["role"])), + ), + }, + // Import + { + ConfigVariables: testConfigVarsMin, + ResourceName: "stackit_sqlserverflex_instance.instance", + ImportStateIdFunc: func(s *terraform.State) (string, error) { + r, ok := s.RootModule().Resources["stackit_sqlserverflex_instance.instance"] + if !ok { + return "", fmt.Errorf("couldn't find resource stackit_sqlserverflex_instance.instance") + } + instanceId, ok := r.Primary.Attributes["instance_id"] + if !ok { + return "", fmt.Errorf("couldn't find attribute instance_id") + } + + return fmt.Sprintf("%s,%s,%s", testutils.ProjectId, testutils.Region, instanceId), nil + }, + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"backup_schedule"}, + ImportStateCheck: func(s []*terraform.InstanceState) error { + if len(s) != 1 { + return fmt.Errorf("expected 1 state, got %d", len(s)) + } + return nil + }, + }, + { + ResourceName: "stackit_sqlserverflex_user.user", + ConfigVariables: testConfigVarsMin, + ImportStateIdFunc: func(s *terraform.State) (string, error) { + r, ok := s.RootModule().Resources["stackit_sqlserverflex_user.user"] + if !ok { + return "", fmt.Errorf("couldn't find resource stackit_sqlserverflex_user.user") + } + instanceId, ok := r.Primary.Attributes["instance_id"] + if !ok { + return "", fmt.Errorf("couldn't find attribute instance_id") + } + userId, ok := r.Primary.Attributes["user_id"] + if !ok { + return "", fmt.Errorf("couldn't find attribute user_id") + } + + return fmt.Sprintf("%s,%s,%s,%s", testutils.ProjectId, testutils.Region, instanceId, userId), nil + }, + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"password"}, + }, + // Update + { + Config: testutils.SQLServerFlexProviderConfig("") + "\n" + resourceMinConfig, + ConfigVariables: configVarsMinUpdated(), + Check: resource.ComposeAggregateTestCheckFunc( + // Instance data + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "project_id", testutils.ConvertConfigVariable(configVarsMinUpdated()["project_id"])), + resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "instance_id"), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "name", testutils.ConvertConfigVariable(configVarsMinUpdated()["name"])), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "acl.#", "1"), + resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "flavor.id"), + resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "flavor.description"), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.cpu", testutils.ConvertConfigVariable(configVarsMinUpdated()["flavor_cpu"])), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.ram", testutils.ConvertConfigVariable(configVarsMinUpdated()["flavor_ram"])), + ), + }, + // Deletion is done by the framework implicitly + }, + }) +} + +func TestAccSQLServerFlexMaxResource(t *testing.T) { + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories, + CheckDestroy: testAccChecksqlserverflexDestroy, + Steps: []resource.TestStep{ + // Creation + { + Config: testutils.SQLServerFlexProviderConfig("") + "\n" + resourceMaxConfig, + ConfigVariables: testConfigVarsMax, + Check: resource.ComposeAggregateTestCheckFunc( + // Instance + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "project_id", testutils.ConvertConfigVariable(testConfigVarsMax["project_id"])), + resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "instance_id"), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "name", testutils.ConvertConfigVariable(testConfigVarsMax["name"])), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "acl.#", "1"), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "acl.0", testutils.ConvertConfigVariable(testConfigVarsMax["acl1"])), + resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "flavor.id"), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.description", testutils.ConvertConfigVariable(testConfigVarsMax["flavor_description"])), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "replicas", testutils.ConvertConfigVariable(testConfigVarsMax["replicas"])), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.cpu", testutils.ConvertConfigVariable(testConfigVarsMax["flavor_cpu"])), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.ram", testutils.ConvertConfigVariable(testConfigVarsMax["flavor_ram"])), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "storage.class", testutils.ConvertConfigVariable(testConfigVarsMax["storage_class"])), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "storage.size", testutils.ConvertConfigVariable(testConfigVarsMax["storage_size"])), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "version", testutils.ConvertConfigVariable(testConfigVarsMax["server_version"])), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "options.retention_days", testutils.ConvertConfigVariable(testConfigVarsMax["options_retention_days"])), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "backup_schedule", testutils.ConvertConfigVariable(testConfigVarsMax["backup_schedule"])), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "region", testutils.Region), + // User + resource.TestCheckResourceAttrPair( + "stackit_sqlserverflex_user.user", "project_id", + "stackit_sqlserverflex_instance.instance", "project_id", + ), + resource.TestCheckResourceAttrPair( + "stackit_sqlserverflex_user.user", "instance_id", + "stackit_sqlserverflex_instance.instance", "instance_id", + ), + resource.TestCheckResourceAttrSet("stackit_sqlserverflex_user.user", "user_id"), + resource.TestCheckResourceAttrSet("stackit_sqlserverflex_user.user", "password"), + ), + }, + // Update + { + Config: testutils.SQLServerFlexProviderConfig("") + "\n" + resourceMaxConfig, + ConfigVariables: testConfigVarsMax, + Check: resource.ComposeAggregateTestCheckFunc( + // Instance + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "project_id", testutils.ConvertConfigVariable(testConfigVarsMax["project_id"])), + resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "instance_id"), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "name", testutils.ConvertConfigVariable(testConfigVarsMax["name"])), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "acl.#", "1"), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "acl.0", testutils.ConvertConfigVariable(testConfigVarsMax["acl1"])), + resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "flavor.id"), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.description", testutils.ConvertConfigVariable(testConfigVarsMax["flavor_description"])), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "replicas", testutils.ConvertConfigVariable(testConfigVarsMax["replicas"])), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.cpu", testutils.ConvertConfigVariable(testConfigVarsMax["flavor_cpu"])), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.ram", testutils.ConvertConfigVariable(testConfigVarsMax["flavor_ram"])), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "storage.class", testutils.ConvertConfigVariable(testConfigVarsMax["storage_class"])), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "storage.size", testutils.ConvertConfigVariable(testConfigVarsMax["storage_size"])), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "version", testutils.ConvertConfigVariable(testConfigVarsMax["server_version"])), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "options.retention_days", testutils.ConvertConfigVariable(testConfigVarsMax["options_retention_days"])), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "backup_schedule", testutils.ConvertConfigVariable(testConfigVarsMax["backup_schedule"])), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "region", testutils.Region), + // User + resource.TestCheckResourceAttrPair( + "stackit_sqlserverflex_user.user", "project_id", + "stackit_sqlserverflex_instance.instance", "project_id", + ), + resource.TestCheckResourceAttrPair( + "stackit_sqlserverflex_user.user", "instance_id", + "stackit_sqlserverflex_instance.instance", "instance_id", + ), + resource.TestCheckResourceAttrSet("stackit_sqlserverflex_user.user", "user_id"), + resource.TestCheckResourceAttrSet("stackit_sqlserverflex_user.user", "password"), + ), + }, + // data source + { + Config: testutils.SQLServerFlexProviderConfig("") + "\n" + resourceMaxConfig, + ConfigVariables: testConfigVarsMax, + Check: resource.ComposeAggregateTestCheckFunc( + // Instance data + resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "project_id", testutils.ConvertConfigVariable(testConfigVarsMax["project_id"])), + resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "name", testutils.ConvertConfigVariable(testConfigVarsMax["name"])), + resource.TestCheckResourceAttrPair( + "data.stackit_sqlserverflex_instance.instance", "project_id", + "stackit_sqlserverflex_instance.instance", "project_id", + ), + resource.TestCheckResourceAttrPair( + "data.stackit_sqlserverflex_instance.instance", "instance_id", + "stackit_sqlserverflex_instance.instance", "instance_id", + ), + resource.TestCheckResourceAttrPair( + "data.stackit_sqlserverflex_user.user", "instance_id", + "stackit_sqlserverflex_user.user", "instance_id", + ), + + resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "acl.#", "1"), + resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "acl.0", testutils.ConvertConfigVariable(testConfigVarsMax["acl1"])), + resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "flavor.id", testutils.ConvertConfigVariable(testConfigVarsMax["flavor_id"])), + resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "flavor.description", testutils.ConvertConfigVariable(testConfigVarsMax["flavor_description"])), + resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "flavor.cpu", testutils.ConvertConfigVariable(testConfigVarsMax["flavor_cpu"])), + resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "flavor.ram", testutils.ConvertConfigVariable(testConfigVarsMax["flavor_ram"])), + resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "replicas", testutils.ConvertConfigVariable(testConfigVarsMax["replicas"])), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "options.retention_days", testutils.ConvertConfigVariable(testConfigVarsMax["options_retention_days"])), + resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "backup_schedule", testutils.ConvertConfigVariable(testConfigVarsMax["backup_schedule"])), + + // User data + resource.TestCheckResourceAttr("data.stackit_sqlserverflex_user.user", "project_id", testutils.ConvertConfigVariable(testConfigVarsMax["project_id"])), + resource.TestCheckResourceAttrSet("data.stackit_sqlserverflex_user.user", "user_id"), + resource.TestCheckResourceAttr("data.stackit_sqlserverflex_user.user", "username", testutils.ConvertConfigVariable(testConfigVarsMax["username"])), + resource.TestCheckResourceAttr("data.stackit_sqlserverflex_user.user", "roles.#", "1"), + resource.TestCheckResourceAttr("data.stackit_sqlserverflex_user.user", "roles.0", testutils.ConvertConfigVariable(testConfigVarsMax["role"])), + resource.TestCheckResourceAttrSet("data.stackit_sqlserverflex_user.user", "host"), + resource.TestCheckResourceAttrSet("data.stackit_sqlserverflex_user.user", "port"), + ), + }, + // Import + { + ConfigVariables: testConfigVarsMax, + ResourceName: "stackit_sqlserverflex_instance.instance", + ImportStateIdFunc: func(s *terraform.State) (string, error) { + r, ok := s.RootModule().Resources["stackit_sqlserverflex_instance.instance"] + if !ok { + return "", fmt.Errorf("couldn't find resource stackit_sqlserverflex_instance.instance") + } + instanceId, ok := r.Primary.Attributes["instance_id"] + if !ok { + return "", fmt.Errorf("couldn't find attribute instance_id") + } + + return fmt.Sprintf("%s,%s,%s", testutils.ProjectId, testutils.Region, instanceId), nil + }, + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"backup_schedule"}, + ImportStateCheck: func(s []*terraform.InstanceState) error { + if len(s) != 1 { + return fmt.Errorf("expected 1 state, got %d", len(s)) + } + if s[0].Attributes["backup_schedule"] != testutils.ConvertConfigVariable(testConfigVarsMax["backup_schedule"]) { + return fmt.Errorf("expected backup_schedule %s, got %s", testConfigVarsMax["backup_schedule"], s[0].Attributes["backup_schedule"]) + } + return nil + }, + }, + { + ResourceName: "stackit_sqlserverflex_user.user", + ConfigVariables: testConfigVarsMax, + ImportStateIdFunc: func(s *terraform.State) (string, error) { + r, ok := s.RootModule().Resources["stackit_sqlserverflex_user.user"] + if !ok { + return "", fmt.Errorf("couldn't find resource stackit_sqlserverflex_user.user") + } + instanceId, ok := r.Primary.Attributes["instance_id"] + if !ok { + return "", fmt.Errorf("couldn't find attribute instance_id") + } + userId, ok := r.Primary.Attributes["user_id"] + if !ok { + return "", fmt.Errorf("couldn't find attribute user_id") + } + + return fmt.Sprintf("%s,%s,%s,%s", testutils.ProjectId, testutils.Region, instanceId, userId), nil + }, + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"password"}, + }, + // Update + { + Config: testutils.SQLServerFlexProviderConfig("") + "\n" + resourceMaxConfig, + ConfigVariables: configVarsMaxUpdated(), + Check: resource.ComposeAggregateTestCheckFunc( + // Instance data + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "project_id", testutils.ConvertConfigVariable(configVarsMaxUpdated()["project_id"])), + resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "instance_id"), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "name", testutils.ConvertConfigVariable(configVarsMaxUpdated()["name"])), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "acl.#", "1"), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "acl.0", testutils.ConvertConfigVariable(configVarsMaxUpdated()["acl1"])), + resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "flavor.id"), + resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "flavor.description"), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.cpu", testutils.ConvertConfigVariable(configVarsMaxUpdated()["flavor_cpu"])), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.ram", testutils.ConvertConfigVariable(configVarsMaxUpdated()["flavor_ram"])), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "replicas", testutils.ConvertConfigVariable(configVarsMaxUpdated()["replicas"])), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "storage.class", testutils.ConvertConfigVariable(configVarsMaxUpdated()["storage_class"])), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "storage.size", testutils.ConvertConfigVariable(configVarsMaxUpdated()["storage_size"])), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "version", testutils.ConvertConfigVariable(configVarsMaxUpdated()["server_version"])), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "options.retention_days", testutils.ConvertConfigVariable(configVarsMaxUpdated()["options_retention_days"])), + resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "backup_schedule", testutils.ConvertConfigVariable(configVarsMaxUpdated()["backup_schedule"])), + ), + }, + // Deletion is done by the framework implicitly + }, + }) +} + +func testAccChecksqlserverflexDestroy(s *terraform.State) error { + ctx := context.Background() + var client *sqlserverflex.APIClient + var err error + if testutils.SQLServerFlexCustomEndpoint == "" { + client, err = sqlserverflex.NewAPIClient() + } else { + client, err = sqlserverflex.NewAPIClient( + coreconfig.WithEndpoint(testutils.SQLServerFlexCustomEndpoint), + ) + } + if err != nil { + return fmt.Errorf("creating client: %w", err) + } + + instancesToDestroy := []string{} + for _, rs := range s.RootModule().Resources { + if rs.Type != "stackit_sqlserverflex_instance" { + continue + } + // instance terraform ID: = "[project_id],[region],[instance_id]" + instanceId := strings.Split(rs.Primary.ID, core.Separator)[2] + instancesToDestroy = append(instancesToDestroy, instanceId) + } + + instancesResp, err := client.ListInstances(ctx, testutils.ProjectId, testutils.Region).Execute() + if err != nil { + return fmt.Errorf("getting instancesResp: %w", err) + } + + items := *instancesResp.Items + for i := range items { + if items[i].Id == nil { + continue + } + if utils.Contains(instancesToDestroy, *items[i].Id) { + err := client.DeleteInstanceExecute(ctx, testutils.ProjectId, *items[i].Id, testutils.Region) + if err != nil { + return fmt.Errorf("destroying instance %s during CheckDestroy: %w", *items[i].Id, err) + } + _, err = wait.DeleteInstanceWaitHandler(ctx, client, testutils.ProjectId, *items[i].Id, testutils.Region).WaitWithContext(ctx) + if err != nil { + return fmt.Errorf("destroying instance %s during CheckDestroy: waiting for deletion %w", *items[i].Id, err) + } + } + } + return nil +} diff --git a/stackit/internal/services/sqlserverflexalpha/testdata/instance_template.gompl b/stackit/internal/services/sqlserverflexalpha/testdata/instance_template.gompl new file mode 100644 index 00000000..035a4344 --- /dev/null +++ b/stackit/internal/services/sqlserverflexalpha/testdata/instance_template.gompl @@ -0,0 +1,53 @@ +provider "stackitprivatepreview" { + default_region = "{{ .Region }}" + service_account_key_path = "{{ .ServiceAccountFilePath }}" +} + +resource "stackitprivatepreview_sqlserverflexalpha_instance" "{{ .TfName }}" { + project_id = "{{ .ProjectId }}" + name = "{{ .Name }}" + backup_schedule = "{{ .BackupSchedule }}" + retention_days = {{ .RetentionDays }} + flavor_id = "{{ .FlavorId }}" + storage = { + class = "{{ .PerformanceClass }}" + size = {{ .Size }} + } +{{ if .UseEncryption }} + encryption = { + kek_key_id = {{ .KekKeyId }} + kek_key_ring_id = {{ .KekKeyRingId }} + kek_key_version = {{ .KekKeyVersion }} + service_account = "{{ .KekServiceAccount }}" + } +{{ end }} + network = { + acl = ["{{ .AclString }}"] + access_scope = "{{ .AccessScope }}" + } + version = "{{ .Version }}" +} + +{{ if .Users }} +{{ $tfName := .TfName }} +{{ range $user := .Users }} +resource "stackitprivatepreview_sqlserverflexalpha_user" "{{ $user.Name }}" { + project_id = "{{ $user.ProjectId }}" + instance_id = stackitprivatepreview_sqlserverflexalpha_instance.{{ $tfName }}.instance_id + username = "{{ $user.Name }}" + roles = [{{ range $i, $v := $user.Roles }}{{if $i}},{{end}}"{{$v}}"{{end}}] +} +{{ end }} +{{ end }} + +{{ if .Databases }} +{{ $tfName := .TfName }} +{{ range $db := .Databases }} +resource "stackitprivatepreview_sqlserverflexalpha_database" "{{ $db.Name }}" { + project_id = "{{ $db.ProjectId }}" + instance_id = stackitprivatepreview_sqlserverflexalpha_instance.{{ $tfName }}.instance_id + name = "{{ $db.Name }}" + owner = "{{ $db.Owner }}" +} +{{ end }} +{{ end }} diff --git a/stackit/internal/services/sqlserverflexalpha/testdata/resource-max.tf b/stackit/internal/services/sqlserverflexalpha/testdata/resource-max.tf deleted file mode 100644 index b365f096..00000000 --- a/stackit/internal/services/sqlserverflexalpha/testdata/resource-max.tf +++ /dev/null @@ -1,52 +0,0 @@ - -variable "project_id" {} -variable "name" {} -variable "acl1" {} -variable "flavor_cpu" {} -variable "flavor_ram" {} -variable "storage_class" {} -variable "storage_size" {} -variable "options_retention_days" {} -variable "backup_schedule" {} -variable "username" {} -variable "role" {} -variable "server_version" {} -variable "region" {} - -resource "stackit_sqlserverflex_instance" "instance" { - project_id = var.project_id - name = var.name - acl = [var.acl1] - flavor = { - cpu = var.flavor_cpu - ram = var.flavor_ram - } - storage = { - class = var.storage_class - size = var.storage_size - } - version = var.server_version - options = { - retention_days = var.options_retention_days - } - backup_schedule = var.backup_schedule - region = var.region -} - -resource "stackit_sqlserverflex_user" "user" { - project_id = stackit_sqlserverflex_instance.instance.project_id - instance_id = stackit_sqlserverflex_instance.instance.instance_id - username = var.username - roles = [var.role] -} - -data "stackit_sqlserverflex_instance" "instance" { - project_id = var.project_id - instance_id = stackit_sqlserverflex_instance.instance.instance_id -} - -data "stackit_sqlserverflex_user" "user" { - project_id = var.project_id - instance_id = stackit_sqlserverflex_instance.instance.instance_id - user_id = stackit_sqlserverflex_user.user.user_id -} diff --git a/stackit/internal/services/sqlserverflexalpha/testdata/resource-min.tf b/stackit/internal/services/sqlserverflexalpha/testdata/resource-min.tf deleted file mode 100644 index 3f17d5cc..00000000 --- a/stackit/internal/services/sqlserverflexalpha/testdata/resource-min.tf +++ /dev/null @@ -1,34 +0,0 @@ - -variable "project_id" {} -variable "name" {} -variable "flavor_cpu" {} -variable "flavor_ram" {} -variable "username" {} -variable "role" {} - -resource "stackit_sqlserverflex_instance" "instance" { - project_id = var.project_id - name = var.name - flavor = { - cpu = var.flavor_cpu - ram = var.flavor_ram - } -} - -resource "stackit_sqlserverflex_user" "user" { - project_id = stackit_sqlserverflex_instance.instance.project_id - instance_id = stackit_sqlserverflex_instance.instance.instance_id - username = var.username - roles = [var.role] -} - -data "stackit_sqlserverflex_instance" "instance" { - project_id = var.project_id - instance_id = stackit_sqlserverflex_instance.instance.instance_id -} - -data "stackit_sqlserverflex_user" "user" { - project_id = var.project_id - instance_id = stackit_sqlserverflex_instance.instance.instance_id - user_id = stackit_sqlserverflex_user.user.user_id -} diff --git a/stackit/provider.go b/stackit/provider.go index bf9e2b71..ce96cd47 100644 --- a/stackit/provider.go +++ b/stackit/provider.go @@ -523,9 +523,9 @@ func (p *Provider) DataSources(_ context.Context) []func() datasource.DataSource // Resources defines the resources implemented in the provider. func (p *Provider) Resources(_ context.Context) []func() resource.Resource { resources := []func() resource.Resource{ - postgresFlexAlphaDatabase.NewDatabaseResource, postgresFlexAlphaInstance.NewInstanceResource, postgresFlexAlphaUser.NewUserResource, + postgresFlexAlphaDatabase.NewDatabaseResource, sqlServerFlexAlphaInstance.NewInstanceResource, sqlserverFlexAlphaUser.NewUserResource, From 131e1700bb304be222085a8855c66478afba7508 Mon Sep 17 00:00:00 2001 From: Andre Harms Date: Wed, 11 Feb 2026 15:39:20 +0000 Subject: [PATCH 18/41] fix: change identity handling for user & database (#50) ## Description relates to #1234 ## Checklist - [ ] Issue was linked above - [ ] Code format was applied: `make fmt` - [ ] Examples were added / adjusted (see `examples/` directory) - [x] Docs are up-to-date: `make generate-docs` (will be checked by CI) - [ ] Unit tests got implemented or updated - [ ] Acceptance tests got implemented or updated (see e.g. [here](https://github.com/stackitcloud/terraform-provider-stackit/blob/f5f99d170996b208672ae684b6da53420e369563/stackit/internal/services/dns/dns_acc_test.go)) - [x] Unit tests are passing: `make test` (will be checked by CI) - [x] No linter issues: `make lint` (will be checked by CI) Reviewed-on: https://tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pulls/50 Reviewed-by: Marcel_Henselin Co-authored-by: Andre Harms Co-committed-by: Andre Harms --- .../databases_data_source_gen.go | 106 ++++-------------- .../postgresflexalpha/database/resource.go | 88 ++++++++++----- .../postgresflexalpha/user/resource.go | 67 ++++++----- .../sqlserverflexalpha/database/resource.go | 61 +++++----- .../sqlserverflexalpha/user/mapper.go | 4 +- .../sqlserverflexalpha/user/mapper_test.go | 32 +++--- .../sqlserverflexalpha/user/resource.go | 27 ++++- .../user/resources_gen/user_resource_gen.go | 4 +- .../sqlserverflexbeta/database/resource.go | 67 ++++++----- .../sqlserverflexbeta/instance/resource.go | 9 +- .../services/sqlserverflexbeta/user/mapper.go | 2 +- .../sqlserverflexbeta/user/mapper_test.go | 32 +++--- .../sqlserverflexbeta/user/resource.go | 16 ++- .../user/resources_gen/user_resource_gen.go | 4 +- 14 files changed, 278 insertions(+), 241 deletions(-) diff --git a/stackit/internal/services/postgresflexalpha/database/datasources_gen/databases_data_source_gen.go b/stackit/internal/services/postgresflexalpha/database/datasources_gen/databases_data_source_gen.go index a533e5ca..b8bc6010 100644 --- a/stackit/internal/services/postgresflexalpha/database/datasources_gen/databases_data_source_gen.go +++ b/stackit/internal/services/postgresflexalpha/database/datasources_gen/databases_data_source_gen.go @@ -23,11 +23,6 @@ func DatabasesDataSourceSchema(ctx context.Context) schema.Schema { "databases": schema.ListNestedAttribute{ NestedObject: schema.NestedAttributeObject{ Attributes: map[string]schema.Attribute{ - "created": schema.StringAttribute{ - Computed: true, - Description: "The data when the database was created in RFC3339 format.", - MarkdownDescription: "The data when the database was created in RFC3339 format.", - }, "id": schema.Int64Attribute{ Computed: true, Description: "The id of the database.", @@ -169,24 +164,6 @@ func (t DatabasesType) ValueFromObject(ctx context.Context, in basetypes.ObjectV attributes := in.Attributes() - createdAttribute, ok := attributes["created"] - - if !ok { - diags.AddError( - "Attribute Missing", - `created is missing from object`) - - return nil, diags - } - - createdVal, ok := createdAttribute.(basetypes.StringValue) - - if !ok { - diags.AddError( - "Attribute Wrong Type", - fmt.Sprintf(`created expected to be basetypes.StringValue, was: %T`, createdAttribute)) - } - idAttribute, ok := attributes["id"] if !ok { @@ -246,11 +223,10 @@ func (t DatabasesType) ValueFromObject(ctx context.Context, in basetypes.ObjectV } return DatabasesValue{ - Created: createdVal, - Id: idVal, - Name: nameVal, - Owner: ownerVal, - state: attr.ValueStateKnown, + Id: idVal, + Name: nameVal, + Owner: ownerVal, + state: attr.ValueStateKnown, }, diags } @@ -317,24 +293,6 @@ func NewDatabasesValue(attributeTypes map[string]attr.Type, attributes map[strin return NewDatabasesValueUnknown(), diags } - createdAttribute, ok := attributes["created"] - - if !ok { - diags.AddError( - "Attribute Missing", - `created is missing from object`) - - return NewDatabasesValueUnknown(), diags - } - - createdVal, ok := createdAttribute.(basetypes.StringValue) - - if !ok { - diags.AddError( - "Attribute Wrong Type", - fmt.Sprintf(`created expected to be basetypes.StringValue, was: %T`, createdAttribute)) - } - idAttribute, ok := attributes["id"] if !ok { @@ -394,11 +352,10 @@ func NewDatabasesValue(attributeTypes map[string]attr.Type, attributes map[strin } return DatabasesValue{ - Created: createdVal, - Id: idVal, - Name: nameVal, - Owner: ownerVal, - state: attr.ValueStateKnown, + Id: idVal, + Name: nameVal, + Owner: ownerVal, + state: attr.ValueStateKnown, }, diags } @@ -470,20 +427,18 @@ func (t DatabasesType) ValueType(ctx context.Context) attr.Value { var _ basetypes.ObjectValuable = DatabasesValue{} type DatabasesValue struct { - Created basetypes.StringValue `tfsdk:"created"` - Id basetypes.Int64Value `tfsdk:"id"` - Name basetypes.StringValue `tfsdk:"name"` - Owner basetypes.StringValue `tfsdk:"owner"` - state attr.ValueState + Id basetypes.Int64Value `tfsdk:"id"` + Name basetypes.StringValue `tfsdk:"name"` + Owner basetypes.StringValue `tfsdk:"owner"` + state attr.ValueState } func (v DatabasesValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) { - attrTypes := make(map[string]tftypes.Type, 4) + attrTypes := make(map[string]tftypes.Type, 3) var val tftypes.Value var err error - attrTypes["created"] = basetypes.StringType{}.TerraformType(ctx) attrTypes["id"] = basetypes.Int64Type{}.TerraformType(ctx) attrTypes["name"] = basetypes.StringType{}.TerraformType(ctx) attrTypes["owner"] = basetypes.StringType{}.TerraformType(ctx) @@ -492,15 +447,7 @@ func (v DatabasesValue) ToTerraformValue(ctx context.Context) (tftypes.Value, er switch v.state { case attr.ValueStateKnown: - vals := make(map[string]tftypes.Value, 4) - - val, err = v.Created.ToTerraformValue(ctx) - - if err != nil { - return tftypes.NewValue(objectType, tftypes.UnknownValue), err - } - - vals["created"] = val + vals := make(map[string]tftypes.Value, 3) val, err = v.Id.ToTerraformValue(ctx) @@ -556,10 +503,9 @@ func (v DatabasesValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValu var diags diag.Diagnostics attributeTypes := map[string]attr.Type{ - "created": basetypes.StringType{}, - "id": basetypes.Int64Type{}, - "name": basetypes.StringType{}, - "owner": basetypes.StringType{}, + "id": basetypes.Int64Type{}, + "name": basetypes.StringType{}, + "owner": basetypes.StringType{}, } if v.IsNull() { @@ -573,10 +519,9 @@ func (v DatabasesValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValu objVal, diags := types.ObjectValue( attributeTypes, map[string]attr.Value{ - "created": v.Created, - "id": v.Id, - "name": v.Name, - "owner": v.Owner, + "id": v.Id, + "name": v.Name, + "owner": v.Owner, }) return objVal, diags @@ -597,10 +542,6 @@ func (v DatabasesValue) Equal(o attr.Value) bool { return true } - if !v.Created.Equal(other.Created) { - return false - } - if !v.Id.Equal(other.Id) { return false } @@ -626,10 +567,9 @@ func (v DatabasesValue) Type(ctx context.Context) attr.Type { func (v DatabasesValue) AttributeTypes(ctx context.Context) map[string]attr.Type { return map[string]attr.Type{ - "created": basetypes.StringType{}, - "id": basetypes.Int64Type{}, - "name": basetypes.StringType{}, - "owner": basetypes.StringType{}, + "id": basetypes.Int64Type{}, + "name": basetypes.StringType{}, + "owner": basetypes.StringType{}, } } diff --git a/stackit/internal/services/postgresflexalpha/database/resource.go b/stackit/internal/services/postgresflexalpha/database/resource.go index 64c62e70..e2013f29 100644 --- a/stackit/internal/services/postgresflexalpha/database/resource.go +++ b/stackit/internal/services/postgresflexalpha/database/resource.go @@ -178,18 +178,11 @@ func (r *databaseResource) Create( return } - // Read identity data - var identityData DatabaseResourceIdentityModel - resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...) - if resp.Diagnostics.HasError() { - return - } - ctx = core.InitProviderContext(ctx) - projectId := identityData.ProjectID.ValueString() - region := identityData.ProjectID.ValueString() - instanceId := identityData.InstanceID.ValueString() + projectId := model.ProjectId.ValueString() + region := model.InstanceId.ValueString() + instanceId := model.InstanceId.ValueString() ctx = tflog.SetField(ctx, "project_id", projectId) ctx = tflog.SetField(ctx, "instance_id", instanceId) @@ -218,8 +211,6 @@ func (r *databaseResource) Create( return } - ctx = core.LogResponse(ctx) - if databaseResp == nil || databaseResp.Id == nil { core.LogAndAddError( ctx, @@ -231,6 +222,19 @@ func (r *databaseResource) Create( } databaseId := *databaseResp.Id ctx = tflog.SetField(ctx, "database_id", databaseId) + ctx = core.LogResponse(ctx) + + // Save identity into Terraform state + identity := DatabaseResourceIdentityModel{ + ProjectID: types.StringValue(projectId), + Region: types.StringValue(region), + InstanceID: types.StringValue(instanceId), + DatabaseID: types.Int64Value(databaseId), + } + resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...) + if resp.Diagnostics.HasError() { + return + } database, err := getDatabaseById(ctx, r.client, projectId, region, instanceId, databaseId) if err != nil { @@ -255,18 +259,6 @@ func (r *databaseResource) Create( return } - // Set data returned by API in identity - identity := DatabaseResourceIdentityModel{ - ProjectID: types.StringValue(projectId), - Region: types.StringValue(region), - InstanceID: types.StringValue(instanceId), - DatabaseID: types.Int64Value(databaseId), - } - resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...) - if resp.Diagnostics.HasError() { - return - } - // Set state to fully populated data resp.Diagnostics.Append(resp.State.Set(ctx, model)...) if resp.Diagnostics.HasError() { @@ -337,6 +329,18 @@ func (r *databaseResource) Read( return } + // Save identity into Terraform state + identity := DatabaseResourceIdentityModel{ + ProjectID: types.StringValue(projectId), + Region: types.StringValue(region), + InstanceID: types.StringValue(instanceId), + DatabaseID: types.Int64Value(databaseId), + } + resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...) + if resp.Diagnostics.HasError() { + return + } + // Set refreshed state diags = resp.State.Set(ctx, model) resp.Diagnostics.Append(diags...) @@ -415,7 +419,7 @@ func (r *databaseResource) Update( } // Update existing database - res, err := r.client.UpdateDatabasePartiallyRequest( + err := r.client.UpdateDatabasePartiallyRequest( ctx, projectId, region, @@ -430,19 +434,45 @@ func (r *databaseResource) Update( ctx = core.LogResponse(ctx) // Map response body to schema - err = mapResourceFields(res.Database, &model) + databaseResp, err := getDatabaseById(ctx, r.client, projectId, region, instanceId, databaseId64) + if err != nil { + oapiErr, ok := err.(*oapierror.GenericOpenAPIError) //nolint:errorlint //complaining that error.As should be used to catch wrapped errors, but this error should not be wrapped + if (ok && oapiErr.StatusCode == http.StatusNotFound) || errors.Is(err, errDatabaseNotFound) { + resp.State.RemoveResource(ctx) + return + } + core.LogAndAddError(ctx, &resp.Diagnostics, "Error reading database", fmt.Sprintf("Calling API: %v", err)) + return + } + + ctx = core.LogResponse(ctx) + + // Map response body to schema + err = mapResourceFields(databaseResp, &model) if err != nil { core.LogAndAddError( ctx, &resp.Diagnostics, - "Error updating database", + "Error reading database", fmt.Sprintf("Processing API payload: %v", err), ) return } + + // Save identity into Terraform state + identity := DatabaseResourceIdentityModel{ + ProjectID: types.StringValue(projectId), + Region: types.StringValue(region), + InstanceID: types.StringValue(instanceId), + DatabaseID: types.Int64Value(databaseId64), + } + resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...) + if resp.Diagnostics.HasError() { + return + } + // Set state to fully populated data - diags = resp.State.Set(ctx, model) - resp.Diagnostics.Append(diags...) + resp.Diagnostics.Append(resp.State.Set(ctx, &model)...) if resp.Diagnostics.HasError() { return } diff --git a/stackit/internal/services/postgresflexalpha/user/resource.go b/stackit/internal/services/postgresflexalpha/user/resource.go index 842ccbc9..8984a4ea 100644 --- a/stackit/internal/services/postgresflexalpha/user/resource.go +++ b/stackit/internal/services/postgresflexalpha/user/resource.go @@ -159,14 +159,10 @@ func (r *userResource) Create( ctx = core.InitProviderContext(ctx) - arg, errExt := r.extractIdentityData(model, identityData) - if errExt != nil { - core.LogAndAddError( - ctx, - &resp.Diagnostics, - extractErrorSummary, - fmt.Sprintf(extractErrorMessage, errExt), - ) + arg := &clientArg{ + projectId: model.ProjectId.ValueString(), + instanceId: model.InstanceId.ValueString(), + region: r.providerData.GetRegionWithOverride(model.Region), } ctx = r.setTFLogFields(ctx, arg) @@ -204,11 +200,13 @@ func (r *userResource) Create( ) return } - model.Id = types.Int64PointerValue(userResp.Id) - model.UserId = types.Int64PointerValue(userResp.Id) - model.Password = types.StringPointerValue(userResp.Password) + model.Id = types.Int64Value(userResp.GetId()) + model.UserId = types.Int64Value(userResp.GetId()) + model.Password = types.StringValue(userResp.GetPassword()) + model.Status = types.StringValue(userResp.GetStatus()) + model.ConnectionString = types.StringValue(userResp.GetConnectionString()) - ctx = tflog.SetField(ctx, "user_id", *userResp.Id) + ctx = tflog.SetField(ctx, "user_id", userResp.GetId()) ctx = core.LogResponse(ctx) @@ -217,7 +215,7 @@ func (r *userResource) Create( ProjectID: types.StringValue(arg.projectId), Region: types.StringValue(arg.region), InstanceID: types.StringValue(arg.instanceId), - UserID: types.Int64PointerValue(userResp.Id), + UserID: types.Int64Value(userResp.GetId()), } resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...) if resp.Diagnostics.HasError() { @@ -261,23 +259,12 @@ func (r *userResource) Read( return } - // Read identity data - var identityData UserResourceIdentityModel - resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...) - if resp.Diagnostics.HasError() { - return - } - ctx = core.InitProviderContext(ctx) - arg, errExt := r.extractIdentityData(model, identityData) - if errExt != nil { - core.LogAndAddError( - ctx, - &resp.Diagnostics, - extractErrorSummary, - fmt.Sprintf(extractErrorMessage, errExt), - ) + arg := &clientArg{ + projectId: model.ProjectId.ValueString(), + instanceId: model.InstanceId.ValueString(), + region: r.providerData.GetRegionWithOverride(model.Region), } ctx = r.setTFLogFields(ctx, arg) @@ -299,6 +286,18 @@ func (r *userResource) Read( ctx = core.LogResponse(ctx) + // Set data returned by API in identity + identity := UserResourceIdentityModel{ + ProjectID: types.StringValue(arg.projectId), + Region: types.StringValue(arg.region), + InstanceID: types.StringValue(arg.instanceId), + UserID: types.Int64Value(arg.userId), + } + resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...) + if resp.Diagnostics.HasError() { + return + } + // Set refreshed state diags = resp.State.Set(ctx, model) resp.Diagnostics.Append(diags...) @@ -385,6 +384,18 @@ func (r *userResource) Update( ctx = core.LogResponse(ctx) + // Set data returned by API in identity + identity := UserResourceIdentityModel{ + ProjectID: types.StringValue(arg.projectId), + Region: types.StringValue(arg.region), + InstanceID: types.StringValue(arg.instanceId), + UserID: types.Int64Value(userId64), + } + resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...) + if resp.Diagnostics.HasError() { + return + } + // Verify update exists, err := r.getUserResource(ctx, &stateModel, arg) diff --git a/stackit/internal/services/sqlserverflexalpha/database/resource.go b/stackit/internal/services/sqlserverflexalpha/database/resource.go index 0f4ce098..f95b57fb 100644 --- a/stackit/internal/services/sqlserverflexalpha/database/resource.go +++ b/stackit/internal/services/sqlserverflexalpha/database/resource.go @@ -155,18 +155,11 @@ func (r *databaseResource) Create(ctx context.Context, req resource.CreateReques return } - // Read identity data - var identityData DatabaseResourceIdentityModel - resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...) - if resp.Diagnostics.HasError() { - return - } - ctx = core.InitProviderContext(ctx) - projectId := identityData.ProjectID.ValueString() - region := identityData.ProjectID.ValueString() - instanceId := identityData.InstanceID.ValueString() + projectId := model.ProjectId.ValueString() + region := model.Region.ValueString() + instanceId := model.InstanceId.ValueString() ctx = tflog.SetField(ctx, "project_id", projectId) ctx = tflog.SetField(ctx, "instance_id", instanceId) @@ -215,6 +208,18 @@ func (r *databaseResource) Create(ctx context.Context, req resource.CreateReques ctx = core.LogResponse(ctx) + // Set data returned by API in identity + identity := DatabaseResourceIdentityModel{ + ProjectID: types.StringValue(projectId), + Region: types.StringValue(region), + InstanceID: types.StringValue(instanceId), + DatabaseName: types.StringValue(databaseName), + } + resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...) + if resp.Diagnostics.HasError() { + return + } + database, err := r.client.GetDatabaseRequest(ctx, projectId, region, instanceId, databaseName).Execute() if err != nil { core.LogAndAddError( @@ -238,18 +243,6 @@ func (r *databaseResource) Create(ctx context.Context, req resource.CreateReques return } - // Set data returned by API in identity - identity := DatabaseResourceIdentityModel{ - ProjectID: types.StringValue(projectId), - Region: types.StringValue(region), - InstanceID: types.StringValue(instanceId), - DatabaseName: types.StringValue(databaseName), - } - resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...) - if resp.Diagnostics.HasError() { - return - } - // Set state to fully populated data resp.Diagnostics.Append(resp.State.Set(ctx, model)...) if resp.Diagnostics.HasError() { @@ -316,9 +309,20 @@ func (r *databaseResource) Read(ctx context.Context, req resource.ReadRequest, r return } + // Set data returned by API in identity + identity := DatabaseResourceIdentityModel{ + ProjectID: types.StringValue(projectId), + Region: types.StringValue(region), + InstanceID: types.StringValue(instanceId), + DatabaseName: types.StringValue(databaseName), + } + resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...) + if resp.Diagnostics.HasError() { + return + } + // Set refreshed state - diags = resp.State.Set(ctx, model) - resp.Diagnostics.Append(diags...) + resp.Diagnostics.Append(resp.State.Set(ctx, &model)...) if resp.Diagnostics.HasError() { return } @@ -371,6 +375,7 @@ func (r *databaseResource) Delete(ctx context.Context, req resource.DeleteReques } ctx = core.LogResponse(ctx) + resp.State.RemoveResource(ctx) tflog.Info(ctx, "sqlserverflexalpha.Database deleted") } @@ -382,7 +387,7 @@ func (r *databaseResource) ModifyPlan( req resource.ModifyPlanRequest, resp *resource.ModifyPlanResponse, ) { // nolint:gocritic // function signature required by Terraform - var configModel sqlserverflexalphaGen.DatabaseModel + var configModel resourceModel // skip initial empty configuration to avoid follow-up errors if req.Config.Raw.IsNull() { return @@ -392,7 +397,7 @@ func (r *databaseResource) ModifyPlan( return } - var planModel sqlserverflexalphaGen.DatabaseModel + var planModel resourceModel resp.Diagnostics.Append(req.Plan.Get(ctx, &planModel)...) if resp.Diagnostics.HasError() { return @@ -485,8 +490,8 @@ func (r *databaseResource) extractIdentityData( model resourceModel, identity DatabaseResourceIdentityModel, ) (projectId, region, instanceId, databaseName string, err error) { - if !model.DatabaseName.IsNull() && !model.DatabaseName.IsUnknown() { - databaseName = model.DatabaseName.ValueString() + if !model.Name.IsNull() && !model.Name.IsUnknown() { + databaseName = model.Name.ValueString() } else { if identity.DatabaseName.IsNull() || identity.DatabaseName.IsUnknown() { return "", "", "", "", fmt.Errorf("database_name not found in config") diff --git a/stackit/internal/services/sqlserverflexalpha/user/mapper.go b/stackit/internal/services/sqlserverflexalpha/user/mapper.go index 2035029b..0b7baa84 100644 --- a/stackit/internal/services/sqlserverflexalpha/user/mapper.go +++ b/stackit/internal/services/sqlserverflexalpha/user/mapper.go @@ -140,7 +140,7 @@ func mapFieldsCreate(userResp *sqlserverflexalpha.CreateUserResponse, model *res if user.Roles != nil { var roles []attr.Value for _, role := range *user.Roles { - roles = append(roles, types.StringValue(string(role))) + roles = append(roles, types.StringValue(role)) } rolesSet, diags := types.SetValue(types.StringType, roles) if diags.HasError() { @@ -165,7 +165,7 @@ func mapFieldsCreate(userResp *sqlserverflexalpha.CreateUserResponse, model *res // toCreatePayload converts a resourceModel to an API CreateUserRequestPayload. func toCreatePayload( model *resourceModel, - roles []sqlserverflexalpha.UserRole, + roles []string, ) (*sqlserverflexalpha.CreateUserRequestPayload, error) { if model == nil { return nil, fmt.Errorf("nil model") diff --git a/stackit/internal/services/sqlserverflexalpha/user/mapper_test.go b/stackit/internal/services/sqlserverflexalpha/user/mapper_test.go index c97fee9c..e450b581 100644 --- a/stackit/internal/services/sqlserverflexalpha/user/mapper_test.go +++ b/stackit/internal/services/sqlserverflexalpha/user/mapper_test.go @@ -42,7 +42,7 @@ func TestMapDataSourceFields(t *testing.T) { "simple_values", &sqlserverflexalpha.GetUserResponse{ - Roles: &[]sqlserverflexalpha.UserRole{ + Roles: &[]string{ "role_1", "role_2", "", @@ -79,7 +79,7 @@ func TestMapDataSourceFields(t *testing.T) { "null_fields_and_int_conversions", &sqlserverflexalpha.GetUserResponse{ Id: utils.Ptr(int64(1)), - Roles: &[]sqlserverflexalpha.UserRole{}, + Roles: &[]string{}, Username: nil, Host: nil, Port: utils.Ptr(int64(2123456789)), @@ -180,7 +180,7 @@ func TestMapFieldsCreate(t *testing.T) { "simple_values", &sqlserverflexalpha.CreateUserResponse{ Id: utils.Ptr(int64(2)), - Roles: &[]sqlserverflexalpha.UserRole{ + Roles: &[]string{ "role_1", "role_2", "", @@ -221,7 +221,7 @@ func TestMapFieldsCreate(t *testing.T) { "null_fields_and_int_conversions", &sqlserverflexalpha.CreateUserResponse{ Id: utils.Ptr(int64(3)), - Roles: &[]sqlserverflexalpha.UserRole{}, + Roles: &[]string{}, Username: nil, Password: utils.Ptr(""), Host: nil, @@ -329,7 +329,7 @@ func TestMapFields(t *testing.T) { { "simple_values", &sqlserverflexalpha.GetUserResponse{ - Roles: &[]sqlserverflexalpha.UserRole{ + Roles: &[]string{ "role_1", "role_2", "", @@ -364,7 +364,7 @@ func TestMapFields(t *testing.T) { "null_fields_and_int_conversions", &sqlserverflexalpha.GetUserResponse{ Id: utils.Ptr(int64(1)), - Roles: &[]sqlserverflexalpha.UserRole{}, + Roles: &[]string{}, Username: nil, Host: nil, Port: utils.Ptr(int64(2123456789)), @@ -435,16 +435,16 @@ func TestToCreatePayload(t *testing.T) { tests := []struct { description string input *resourceModel - inputRoles []sqlserverflexalpha.UserRole + inputRoles []string expected *sqlserverflexalpha.CreateUserRequestPayload isValid bool }{ { "default_values", &resourceModel{}, - []sqlserverflexalpha.UserRole{}, + []string{}, &sqlserverflexalpha.CreateUserRequestPayload{ - Roles: &[]sqlserverflexalpha.UserRole{}, + Roles: &[]string{}, Username: nil, }, true, @@ -454,12 +454,12 @@ func TestToCreatePayload(t *testing.T) { &resourceModel{ Username: types.StringValue("username"), }, - []sqlserverflexalpha.UserRole{ + []string{ "role_1", "role_2", }, &sqlserverflexalpha.CreateUserRequestPayload{ - Roles: &[]sqlserverflexalpha.UserRole{ + Roles: &[]string{ "role_1", "role_2", }, @@ -472,11 +472,11 @@ func TestToCreatePayload(t *testing.T) { &resourceModel{ Username: types.StringNull(), }, - []sqlserverflexalpha.UserRole{ + []string{ "", }, &sqlserverflexalpha.CreateUserRequestPayload{ - Roles: &[]sqlserverflexalpha.UserRole{ + Roles: &[]string{ "", }, Username: nil, @@ -486,7 +486,7 @@ func TestToCreatePayload(t *testing.T) { { "nil_model", nil, - []sqlserverflexalpha.UserRole{}, + []string{}, nil, false, }, @@ -495,9 +495,9 @@ func TestToCreatePayload(t *testing.T) { &resourceModel{ Username: types.StringValue("username"), }, - []sqlserverflexalpha.UserRole{}, + []string{}, &sqlserverflexalpha.CreateUserRequestPayload{ - Roles: &[]sqlserverflexalpha.UserRole{}, + Roles: &[]string{}, Username: utils.Ptr("username"), }, true, diff --git a/stackit/internal/services/sqlserverflexalpha/user/resource.go b/stackit/internal/services/sqlserverflexalpha/user/resource.go index 3a95c862..52dee432 100644 --- a/stackit/internal/services/sqlserverflexalpha/user/resource.go +++ b/stackit/internal/services/sqlserverflexalpha/user/resource.go @@ -179,7 +179,7 @@ func (r *userResource) Create( ctx = tflog.SetField(ctx, "instance_id", instanceId) ctx = tflog.SetField(ctx, "region", region) - var roles []sqlserverflexalpha.UserRole + var roles []string if !model.Roles.IsNull() && !model.Roles.IsUnknown() { diags = model.Roles.ElementsAs(ctx, &roles, false) resp.Diagnostics.Append(diags...) @@ -220,6 +220,18 @@ func (r *userResource) Create( userId := *userResp.Id ctx = tflog.SetField(ctx, "user_id", userId) + // Set data returned by API in identity + identity := UserResourceIdentityModel{ + ProjectID: types.StringValue(projectId), + Region: types.StringValue(region), + InstanceID: types.StringValue(instanceId), + UserID: types.Int64Value(userResp.GetId()), + } + resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...) + if resp.Diagnostics.HasError() { + return + } + // Map response body to schema err = mapFieldsCreate(userResp, &model, region) if err != nil { @@ -282,6 +294,18 @@ func (r *userResource) Read( ctx = core.LogResponse(ctx) + // Set data returned by API in identity + identity := UserResourceIdentityModel{ + ProjectID: types.StringValue(projectId), + Region: types.StringValue(region), + InstanceID: types.StringValue(instanceId), + UserID: types.Int64Value(userId), + } + resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...) + if resp.Diagnostics.HasError() { + return + } + // Map response body to schema err = mapFields(recordSetResp, &model, region) if err != nil { @@ -351,6 +375,7 @@ func (r *userResource) Delete( } ctx = core.LogResponse(ctx) + resp.State.RemoveResource(ctx) tflog.Info(ctx, "SQLServer Flex user deleted") } diff --git a/stackit/internal/services/sqlserverflexalpha/user/resources_gen/user_resource_gen.go b/stackit/internal/services/sqlserverflexalpha/user/resources_gen/user_resource_gen.go index 2b456e79..b316b020 100644 --- a/stackit/internal/services/sqlserverflexalpha/user/resources_gen/user_resource_gen.go +++ b/stackit/internal/services/sqlserverflexalpha/user/resources_gen/user_resource_gen.go @@ -66,8 +66,8 @@ func UserResourceSchema(ctx context.Context) schema.Schema { "roles": schema.ListAttribute{ ElementType: types.StringType, Required: true, - Description: "A list containing the user roles for the instance.", - MarkdownDescription: "A list containing the user roles for the instance.", + Description: "A list containing the user roles for the instance. A list with the valid user roles can be retrieved using the List Roles endpoint.", + MarkdownDescription: "A list containing the user roles for the instance. A list with the valid user roles can be retrieved using the List Roles endpoint.", }, "status": schema.StringAttribute{ Computed: true, diff --git a/stackit/internal/services/sqlserverflexbeta/database/resource.go b/stackit/internal/services/sqlserverflexbeta/database/resource.go index 8e09975b..1f57355d 100644 --- a/stackit/internal/services/sqlserverflexbeta/database/resource.go +++ b/stackit/internal/services/sqlserverflexbeta/database/resource.go @@ -69,7 +69,7 @@ func (r *databaseResource) Metadata( //go:embed planModifiers.yaml var modifiersFileByte []byte -func (r *databaseResource) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) { +func (r *databaseResource) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) { s := sqlserverflexbetaResGen.DatabaseResourceSchema(ctx) @@ -160,23 +160,16 @@ func (r *databaseResource) Create(ctx context.Context, req resource.CreateReques return } - // Read identity data - var identityData DatabaseResourceIdentityModel - resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...) - if resp.Diagnostics.HasError() { - return - } - ctx = core.InitProviderContext(ctx) - projectId := identityData.ProjectID.ValueString() - region := identityData.Region.ValueString() - instanceId := identityData.InstanceID.ValueString() + projectId := data.ProjectId.ValueString() + region := data.Region.ValueString() + instanceId := data.InstanceId.ValueString() ctx = tflog.SetField(ctx, "project_id", projectId) ctx = tflog.SetField(ctx, "region", region) ctx = tflog.SetField(ctx, "instance_id", instanceId) - databaseName := identityData.DatabaseName.ValueString() + databaseName := data.Name.ValueString() ctx = tflog.SetField(ctx, "database_name", databaseName) payLoad := sqlserverflexbeta.CreateDatabaseRequestPayload{ @@ -215,6 +208,18 @@ func (r *databaseResource) Create(ctx context.Context, req resource.CreateReques ctx = core.LogResponse(ctx) + // Set data returned by API in identity + identity := DatabaseResourceIdentityModel{ + ProjectID: types.StringValue(projectId), + Region: types.StringValue(region), + InstanceID: types.StringValue(instanceId), + DatabaseName: types.StringValue(databaseName), + } + resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...) + if resp.Diagnostics.HasError() { + return + } + // TODO: is this neccessary to wait for the database-> API say 200 ? /*waitResp, err := wait.CreateDatabaseWaitHandler( ctx, @@ -301,18 +306,6 @@ func (r *databaseResource) Create(ctx context.Context, req resource.CreateReques return } - // Set data returned by API in identity - identity := DatabaseResourceIdentityModel{ - ProjectID: types.StringValue(projectId), - Region: types.StringValue(region), - InstanceID: types.StringValue(instanceId), - DatabaseName: types.StringValue(databaseName), - } - resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...) - if resp.Diagnostics.HasError() { - return - } - // Set state to fully populated data resp.Diagnostics.Append(resp.State.Set(ctx, data)...) if resp.Diagnostics.HasError() { @@ -381,9 +374,20 @@ func (r *databaseResource) Read(ctx context.Context, req resource.ReadRequest, r return } + // Save identity into Terraform state + identity := DatabaseResourceIdentityModel{ + ProjectID: types.StringValue(projectId), + Region: types.StringValue(region), + InstanceID: types.StringValue(instanceId), + DatabaseName: types.StringValue(databaseName), + } + resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...) + if resp.Diagnostics.HasError() { + return + } + // Set refreshed state - diags = resp.State.Set(ctx, model) - resp.Diagnostics.Append(diags...) + resp.Diagnostics.Append(resp.State.Set(ctx, &model)...) if resp.Diagnostics.HasError() { return } @@ -391,7 +395,7 @@ func (r *databaseResource) Read(ctx context.Context, req resource.ReadRequest, r tflog.Info(ctx, "sqlserverflexbeta.Database read") } -func (r *databaseResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) { +func (r *databaseResource) Update(ctx context.Context, _ resource.UpdateRequest, resp *resource.UpdateResponse) { // TODO: Check update api endpoint - not available at the moment, so return an error for now core.LogAndAddError(ctx, &resp.Diagnostics, "Error updating database", "Database can't be updated") } @@ -436,6 +440,7 @@ func (r *databaseResource) Delete(ctx context.Context, req resource.DeleteReques } ctx = core.LogResponse(ctx) + resp.State.RemoveResource(ctx) tflog.Info(ctx, "sqlserverflexbeta.Database deleted") } @@ -453,13 +458,13 @@ func (r *databaseResource) ModifyPlan( return } - var configModel sqlserverflexbetaResGen.DatabaseModel + var configModel resourceModel resp.Diagnostics.Append(req.Config.Get(ctx, &configModel)...) if resp.Diagnostics.HasError() { return } - var planModel sqlserverflexbetaResGen.DatabaseModel + var planModel resourceModel resp.Diagnostics.Append(req.Plan.Get(ctx, &planModel)...) if resp.Diagnostics.HasError() { return @@ -562,8 +567,8 @@ func (r *databaseResource) extractIdentityData( model resourceModel, identity DatabaseResourceIdentityModel, ) (projectId, region, instanceId, databaseName string, err error) { - if !model.DatabaseName.IsNull() && !model.DatabaseName.IsUnknown() { - databaseName = model.DatabaseName.ValueString() + if !model.Name.IsNull() && !model.Name.IsUnknown() { + databaseName = model.Name.ValueString() } else { if identity.DatabaseName.IsNull() || identity.DatabaseName.IsUnknown() { return "", "", "", "", fmt.Errorf("database_name not found in config") diff --git a/stackit/internal/services/sqlserverflexbeta/instance/resource.go b/stackit/internal/services/sqlserverflexbeta/instance/resource.go index 5ccfa3c0..741424a0 100644 --- a/stackit/internal/services/sqlserverflexbeta/instance/resource.go +++ b/stackit/internal/services/sqlserverflexbeta/instance/resource.go @@ -160,7 +160,7 @@ func (r *instanceResource) ModifyPlan( if req.Plan.Raw.IsNull() { return } - var planModel sqlserverflexbetaResGen.InstanceModel + var planModel resourceModel resp.Diagnostics.Append(req.Plan.Get(ctx, &planModel)...) if resp.Diagnostics.HasError() { return @@ -334,6 +334,7 @@ func (r *instanceResource) Read(ctx context.Context, req resource.ReadRequest, r return } + // Save identity into Terraform state identity := InstanceResourceIdentityModel{ ProjectID: types.StringValue(projectId), Region: types.StringValue(region), @@ -346,6 +347,9 @@ func (r *instanceResource) Read(ctx context.Context, req resource.ReadRequest, r // Save updated data into Terraform state resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) + if resp.Diagnostics.HasError() { + return + } tflog.Info(ctx, "sqlserverflexbeta.Instance read") } @@ -433,6 +437,9 @@ func (r *instanceResource) Update(ctx context.Context, req resource.UpdateReques // Save updated data into Terraform state resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) + if resp.Diagnostics.HasError() { + return + } tflog.Info(ctx, "sqlserverflexbeta.Instance updated") } diff --git a/stackit/internal/services/sqlserverflexbeta/user/mapper.go b/stackit/internal/services/sqlserverflexbeta/user/mapper.go index d2324058..bddea43a 100644 --- a/stackit/internal/services/sqlserverflexbeta/user/mapper.go +++ b/stackit/internal/services/sqlserverflexbeta/user/mapper.go @@ -165,7 +165,7 @@ func mapFieldsCreate(userResp *sqlserverflexbeta.CreateUserResponse, model *reso // toCreatePayload converts a resourceModel to an API CreateUserRequestPayload. func toCreatePayload( model *resourceModel, - roles []sqlserverflexbeta.UserRole, + roles []string, ) (*sqlserverflexbeta.CreateUserRequestPayload, error) { if model == nil { return nil, fmt.Errorf("nil model") diff --git a/stackit/internal/services/sqlserverflexbeta/user/mapper_test.go b/stackit/internal/services/sqlserverflexbeta/user/mapper_test.go index 23c6121c..7b23805c 100644 --- a/stackit/internal/services/sqlserverflexbeta/user/mapper_test.go +++ b/stackit/internal/services/sqlserverflexbeta/user/mapper_test.go @@ -42,7 +42,7 @@ func TestMapDataSourceFields(t *testing.T) { "simple_values", &sqlserverflexbeta.GetUserResponse{ - Roles: &[]sqlserverflexbeta.UserRole{ + Roles: &[]string{ "role_1", "role_2", "", @@ -81,7 +81,7 @@ func TestMapDataSourceFields(t *testing.T) { "null_fields_and_int_conversions", &sqlserverflexbeta.GetUserResponse{ Id: utils.Ptr(int64(1)), - Roles: &[]sqlserverflexbeta.UserRole{}, + Roles: &[]string{}, Username: nil, Host: nil, Port: utils.Ptr(int64(2123456789)), @@ -182,7 +182,7 @@ func TestMapFieldsCreate(t *testing.T) { "simple_values", &sqlserverflexbeta.CreateUserResponse{ Id: utils.Ptr(int64(2)), - Roles: &[]sqlserverflexbeta.UserRole{ + Roles: &[]string{ "role_1", "role_2", "", @@ -223,7 +223,7 @@ func TestMapFieldsCreate(t *testing.T) { "null_fields_and_int_conversions", &sqlserverflexbeta.CreateUserResponse{ Id: utils.Ptr(int64(3)), - Roles: &[]sqlserverflexbeta.UserRole{}, + Roles: &[]string{}, Username: nil, Password: utils.Ptr(""), Host: nil, @@ -331,7 +331,7 @@ func TestMapFields(t *testing.T) { { "simple_values", &sqlserverflexbeta.GetUserResponse{ - Roles: &[]sqlserverflexbeta.UserRole{ + Roles: &[]string{ "role_1", "role_2", "", @@ -366,7 +366,7 @@ func TestMapFields(t *testing.T) { "null_fields_and_int_conversions", &sqlserverflexbeta.GetUserResponse{ Id: utils.Ptr(int64(1)), - Roles: &[]sqlserverflexbeta.UserRole{}, + Roles: &[]string{}, Username: nil, Host: nil, Port: utils.Ptr(int64(2123456789)), @@ -437,16 +437,16 @@ func TestToCreatePayload(t *testing.T) { tests := []struct { description string input *resourceModel - inputRoles []sqlserverflexbeta.UserRole + inputRoles []string expected *sqlserverflexbeta.CreateUserRequestPayload isValid bool }{ { "default_values", &resourceModel{}, - []sqlserverflexbeta.UserRole{}, + []string{}, &sqlserverflexbeta.CreateUserRequestPayload{ - Roles: &[]sqlserverflexbeta.UserRole{}, + Roles: &[]string{}, Username: nil, }, true, @@ -456,12 +456,12 @@ func TestToCreatePayload(t *testing.T) { &resourceModel{ Username: types.StringValue("username"), }, - []sqlserverflexbeta.UserRole{ + []string{ "role_1", "role_2", }, &sqlserverflexbeta.CreateUserRequestPayload{ - Roles: &[]sqlserverflexbeta.UserRole{ + Roles: &[]string{ "role_1", "role_2", }, @@ -474,11 +474,11 @@ func TestToCreatePayload(t *testing.T) { &resourceModel{ Username: types.StringNull(), }, - []sqlserverflexbeta.UserRole{ + []string{ "", }, &sqlserverflexbeta.CreateUserRequestPayload{ - Roles: &[]sqlserverflexbeta.UserRole{ + Roles: &[]string{ "", }, Username: nil, @@ -488,7 +488,7 @@ func TestToCreatePayload(t *testing.T) { { "nil_model", nil, - []sqlserverflexbeta.UserRole{}, + []string{}, nil, false, }, @@ -497,9 +497,9 @@ func TestToCreatePayload(t *testing.T) { &resourceModel{ Username: types.StringValue("username"), }, - []sqlserverflexbeta.UserRole{}, + []string{}, &sqlserverflexbeta.CreateUserRequestPayload{ - Roles: &[]sqlserverflexbeta.UserRole{}, + Roles: &[]string{}, Username: utils.Ptr("username"), }, true, diff --git a/stackit/internal/services/sqlserverflexbeta/user/resource.go b/stackit/internal/services/sqlserverflexbeta/user/resource.go index 03981b93..49850497 100644 --- a/stackit/internal/services/sqlserverflexbeta/user/resource.go +++ b/stackit/internal/services/sqlserverflexbeta/user/resource.go @@ -177,7 +177,7 @@ func (r *userResource) Create( ctx = tflog.SetField(ctx, "instance_id", instanceId) ctx = tflog.SetField(ctx, "region", region) - var roles []sqlserverflexbeta.UserRole + var roles []string if !model.Roles.IsNull() && !model.Roles.IsUnknown() { diags = model.Roles.ElementsAs(ctx, &roles, false) resp.Diagnostics.Append(diags...) @@ -292,6 +292,18 @@ func (r *userResource) Read( return } + // Set data returned by API in identity + identity := UserResourceIdentityModel{ + ProjectID: types.StringValue(projectId), + Region: types.StringValue(region), + InstanceID: types.StringValue(instanceId), + UserID: types.Int64Value(userId), + } + resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...) + if resp.Diagnostics.HasError() { + return + } + // Set refreshed state diags = resp.State.Set(ctx, model) resp.Diagnostics.Append(diags...) @@ -350,6 +362,8 @@ func (r *userResource) Delete( ctx = core.LogResponse(ctx) + resp.State.RemoveResource(ctx) + tflog.Info(ctx, "SQLServer Flex user deleted") } diff --git a/stackit/internal/services/sqlserverflexbeta/user/resources_gen/user_resource_gen.go b/stackit/internal/services/sqlserverflexbeta/user/resources_gen/user_resource_gen.go index 3bf7f4fc..f181f79c 100644 --- a/stackit/internal/services/sqlserverflexbeta/user/resources_gen/user_resource_gen.go +++ b/stackit/internal/services/sqlserverflexbeta/user/resources_gen/user_resource_gen.go @@ -66,8 +66,8 @@ func UserResourceSchema(ctx context.Context) schema.Schema { "roles": schema.ListAttribute{ ElementType: types.StringType, Required: true, - Description: "A list containing the user roles for the instance.", - MarkdownDescription: "A list containing the user roles for the instance.", + Description: "A list containing the user roles for the instance. A list with the valid user roles can be retrieved using the List Roles endpoint.", + MarkdownDescription: "A list containing the user roles for the instance. A list with the valid user roles can be retrieved using the List Roles endpoint.", }, "status": schema.StringAttribute{ Computed: true, From 0c9ecfc6702229a8ef665d6dd0b27c5f7863688a Mon Sep 17 00:00:00 2001 From: "Marcel S. Henselin" Date: Thu, 12 Feb 2026 11:42:37 +0000 Subject: [PATCH 19/41] fix: sqlserver beta fixes (#51) ## Description relates to #1234 ## Checklist - [ ] Issue was linked above - [ ] Code format was applied: `make fmt` - [ ] Examples were added / adjusted (see `examples/` directory) - [x] Docs are up-to-date: `make generate-docs` (will be checked by CI) - [ ] Unit tests got implemented or updated - [ ] Acceptance tests got implemented or updated (see e.g. [here](https://github.com/stackitcloud/terraform-provider-stackit/blob/f5f99d170996b208672ae684b6da53420e369563/stackit/internal/services/dns/dns_acc_test.go)) - [x] Unit tests are passing: `make test` (will be checked by CI) - [x] No linter issues: `make lint` (will be checked by CI) Reviewed-on: https://tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pulls/51 Reviewed-by: Andre_Harms Co-authored-by: Marcel S. Henselin Co-committed-by: Marcel S. Henselin --- .../sqlserverflexalpha/database/resource.go | 4 +- .../sqlserverflexbeta/database/mapper.go | 3 + .../sqlserverflexbeta/database/resource.go | 42 ++++- .../sqlserverflexbeta/instance/resource.go | 2 +- .../sqlserverflex_acc_test.go | 9 +- .../testdata/instance_template.gompl | 11 +- .../services/sqlserverflexbeta/user/mapper.go | 3 + .../sqlserverflexbeta/user/resource.go | 84 ++++++++- .../internal/wait/sqlserverflexbeta/wait.go | 168 ++++++++++++++++-- 9 files changed, 291 insertions(+), 35 deletions(-) diff --git a/stackit/internal/services/sqlserverflexalpha/database/resource.go b/stackit/internal/services/sqlserverflexalpha/database/resource.go index f95b57fb..0ffba164 100644 --- a/stackit/internal/services/sqlserverflexalpha/database/resource.go +++ b/stackit/internal/services/sqlserverflexalpha/database/resource.go @@ -269,7 +269,7 @@ func (r *databaseResource) Read(ctx context.Context, req resource.ReadRequest, r ctx = core.InitProviderContext(ctx) - projectId, instanceId, region, databaseName, errExt := r.extractIdentityData(model, identityData) + projectId, region, instanceId, databaseName, errExt := r.extractIdentityData(model, identityData) if errExt != nil { core.LogAndAddError( ctx, @@ -353,7 +353,7 @@ func (r *databaseResource) Delete(ctx context.Context, req resource.DeleteReques ctx = core.InitProviderContext(ctx) - projectId, instanceId, region, databaseName, errExt := r.extractIdentityData(model, identityData) + projectId, region, instanceId, databaseName, errExt := r.extractIdentityData(model, identityData) if errExt != nil { core.LogAndAddError( ctx, diff --git a/stackit/internal/services/sqlserverflexbeta/database/mapper.go b/stackit/internal/services/sqlserverflexbeta/database/mapper.go index 51772af1..349f7717 100644 --- a/stackit/internal/services/sqlserverflexbeta/database/mapper.go +++ b/stackit/internal/services/sqlserverflexbeta/database/mapper.go @@ -79,6 +79,9 @@ func mapResourceFields(source *sqlserverflexbeta.GetDatabaseResponse, model *res model.ProjectId = types.StringValue(model.ProjectId.ValueString()) model.InstanceId = types.StringValue(model.InstanceId.ValueString()) + model.CompatibilityLevel = types.Int64Value(source.GetCompatibilityLevel()) + model.CollationName = types.StringValue(source.GetCollationName()) + return nil } diff --git a/stackit/internal/services/sqlserverflexbeta/database/resource.go b/stackit/internal/services/sqlserverflexbeta/database/resource.go index 1f57355d..652575fc 100644 --- a/stackit/internal/services/sqlserverflexbeta/database/resource.go +++ b/stackit/internal/services/sqlserverflexbeta/database/resource.go @@ -7,6 +7,7 @@ import ( "fmt" "net/http" "strings" + "time" "github.com/hashicorp/terraform-plugin-framework/path" "github.com/hashicorp/terraform-plugin-framework/resource" @@ -17,6 +18,7 @@ import ( "github.com/stackitcloud/stackit-sdk-go/core/oapierror" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" + wait "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/wait/sqlserverflexbeta" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" @@ -59,7 +61,7 @@ type DatabaseResourceIdentityModel struct { } func (r *databaseResource) Metadata( - ctx context.Context, + _ context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse, ) { @@ -179,6 +181,26 @@ func (r *databaseResource) Create(ctx context.Context, req resource.CreateReques Owner: data.Owner.ValueStringPointer(), } + _, err := wait.WaitForUserWaitHandler( + ctx, + r.client, + projectId, + instanceId, + region, + data.Owner.ValueString(), + ). + SetSleepBeforeWait(10 * time.Second). + WaitWithContext(ctx) + if err != nil { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + createErr, + fmt.Sprintf("Calling API: %v", err), + ) + return + } + createResp, err := r.client.CreateDatabaseRequest(ctx, projectId, region, instanceId). CreateDatabaseRequestPayload(payLoad). Execute() @@ -221,7 +243,7 @@ func (r *databaseResource) Create(ctx context.Context, req resource.CreateReques } // TODO: is this neccessary to wait for the database-> API say 200 ? - /*waitResp, err := wait.CreateDatabaseWaitHandler( + waitResp, err := wait.CreateDatabaseWaitHandler( ctx, r.client, projectId, @@ -253,7 +275,7 @@ func (r *databaseResource) Create(ctx context.Context, req resource.CreateReques return } - if *waitResp.Id != createId { + if *waitResp.Id != databaseId { core.LogAndAddError( ctx, &resp.Diagnostics, @@ -281,7 +303,7 @@ func (r *databaseResource) Create(ctx context.Context, req resource.CreateReques "Database creation waiting: returned name is different", ) return - }*/ + } database, err := r.client.GetDatabaseRequest(ctx, projectId, region, instanceId, databaseName).Execute() if err != nil { @@ -334,7 +356,7 @@ func (r *databaseResource) Read(ctx context.Context, req resource.ReadRequest, r ctx = core.InitProviderContext(ctx) - projectId, instanceId, region, databaseName, errExt := r.extractIdentityData(model, identityData) + projectId, region, instanceId, databaseName, errExt := r.extractIdentityData(model, identityData) if errExt != nil { core.LogAndAddError( ctx, @@ -418,7 +440,7 @@ func (r *databaseResource) Delete(ctx context.Context, req resource.DeleteReques ctx = core.InitProviderContext(ctx) - projectId, instanceId, region, databaseName, errExt := r.extractIdentityData(model, identityData) + projectId, region, instanceId, databaseName, errExt := r.extractIdentityData(model, identityData) if errExt != nil { core.LogAndAddError( ctx, @@ -436,7 +458,13 @@ func (r *databaseResource) Delete(ctx context.Context, req resource.DeleteReques // Delete existing record set err := r.client.DeleteDatabaseRequestExecute(ctx, projectId, region, instanceId, databaseName) if err != nil { - core.LogAndAddError(ctx, &resp.Diagnostics, "Error deleting database", fmt.Sprintf("Calling API: %v", err)) + core.LogAndAddError( + ctx, + &resp.Diagnostics, + "Error deleting database", + fmt.Sprintf( + "Calling API: %v\nname: %s, region: %s, instanceId: %s", err, databaseName, region, instanceId)) + return } ctx = core.LogResponse(ctx) diff --git a/stackit/internal/services/sqlserverflexbeta/instance/resource.go b/stackit/internal/services/sqlserverflexbeta/instance/resource.go index 741424a0..6abcbbd3 100644 --- a/stackit/internal/services/sqlserverflexbeta/instance/resource.go +++ b/stackit/internal/services/sqlserverflexbeta/instance/resource.go @@ -241,7 +241,7 @@ func (r *instanceResource) Create(ctx context.Context, req resource.CreateReques InstanceId, region, ).SetSleepBeforeWait( - 30 * time.Second, + 10 * time.Second, ).SetTimeout( 90 * time.Minute, ).WaitWithContext(ctx) diff --git a/stackit/internal/services/sqlserverflexbeta/sqlserverflex_acc_test.go b/stackit/internal/services/sqlserverflexbeta/sqlserverflex_acc_test.go index 9a3bb1b7..9c091cde 100644 --- a/stackit/internal/services/sqlserverflexbeta/sqlserverflex_acc_test.go +++ b/stackit/internal/services/sqlserverflexbeta/sqlserverflex_acc_test.go @@ -215,6 +215,7 @@ func TestAccInstanceWithUsers(t *testing.T) { func TestAccInstanceWithDatabases(t *testing.T) { data := getExample() + t.Logf(" ... working on instance %s", data.TfName) dbName := "testDb" userName := "testUser" data.Users = []User{ @@ -258,13 +259,15 @@ func TestAccInstanceWithDatabases(t *testing.T) { data, ), Check: resource.ComposeAggregateTestCheckFunc( - resource.TestCheckResourceAttr(resName, "name", data.Name), resource.TestCheckResourceAttrSet(resName, "id"), - resource.TestCheckResourceAttr(resUserName, "name", userName), + resource.TestCheckResourceAttr(resName, "name", data.Name), + resource.TestCheckResourceAttrSet(resUserName, "id"), + resource.TestCheckResourceAttr(resUserName, "username", userName), + + resource.TestCheckResourceAttrSet(resDbName, "id"), resource.TestCheckResourceAttr(resDbName, "name", dbName), resource.TestCheckResourceAttr(resDbName, "owner", userName), - resource.TestCheckResourceAttrSet(resDbName, "id"), ), }, }, diff --git a/stackit/internal/services/sqlserverflexbeta/testdata/instance_template.gompl b/stackit/internal/services/sqlserverflexbeta/testdata/instance_template.gompl index ddc95138..84ca19c1 100644 --- a/stackit/internal/services/sqlserverflexbeta/testdata/instance_template.gompl +++ b/stackit/internal/services/sqlserverflexbeta/testdata/instance_template.gompl @@ -44,10 +44,13 @@ resource "stackitprivatepreview_sqlserverflexbeta_user" "{{ $user.Name }}" { {{ $tfName := .TfName }} {{ range $db := .Databases }} resource "stackitprivatepreview_sqlserverflexbeta_database" "{{ $db.Name }}" { - project_id = "{{ $db.ProjectId }}" - instance_id = stackitprivatepreview_sqlserverflexbeta_instance.{{ $tfName }}.instance_id - name = "{{ $db.Name }}" - owner = "{{ $db.Owner }}" + depends_on = [stackitprivatepreview_sqlserverflexbeta_user.{{ $db.Owner }}] + project_id = "{{ $db.ProjectId }}" + instance_id = stackitprivatepreview_sqlserverflexbeta_instance.{{ $tfName }}.instance_id + name = "{{ $db.Name }}" + owner = "{{ $db.Owner }}" + collation = "Albanian_BIN" + compatibility = "160" } {{ end }} {{ end }} diff --git a/stackit/internal/services/sqlserverflexbeta/user/mapper.go b/stackit/internal/services/sqlserverflexbeta/user/mapper.go index bddea43a..59c915bb 100644 --- a/stackit/internal/services/sqlserverflexbeta/user/mapper.go +++ b/stackit/internal/services/sqlserverflexbeta/user/mapper.go @@ -153,6 +153,9 @@ func mapFieldsCreate(userResp *sqlserverflexbeta.CreateUserResponse, model *reso model.Roles = types.List(types.SetNull(types.StringType)) } + model.Password = types.StringPointerValue(user.Password) + model.Uri = types.StringPointerValue(user.Uri) + model.Host = types.StringPointerValue(user.Host) model.Port = types.Int64PointerValue(user.Port) model.Region = types.StringValue(region) diff --git a/stackit/internal/services/sqlserverflexbeta/user/resource.go b/stackit/internal/services/sqlserverflexbeta/user/resource.go index 49850497..a31c380b 100644 --- a/stackit/internal/services/sqlserverflexbeta/user/resource.go +++ b/stackit/internal/services/sqlserverflexbeta/user/resource.go @@ -8,6 +8,7 @@ import ( "net/http" "strconv" "strings" + "time" "github.com/hashicorp/terraform-plugin-framework/path" "github.com/hashicorp/terraform-plugin-framework/resource" @@ -47,7 +48,7 @@ type UserResourceIdentityModel struct { ProjectID types.String `tfsdk:"project_id"` Region types.String `tfsdk:"region"` InstanceID types.String `tfsdk:"instance_id"` - UserID types.Int64 `tfsdk:"database_id"` + UserID types.Int64 `tfsdk:"user_id"` } type userResource struct { @@ -215,10 +216,22 @@ func (r *userResource) Create( ) return } + userId := *userResp.Id ctx = tflog.SetField(ctx, "user_id", userId) - // Map response body to schema + // Set data returned by API in identity + identity := UserResourceIdentityModel{ + ProjectID: types.StringValue(projectId), + Region: types.StringValue(region), + InstanceID: types.StringValue(instanceId), + UserID: types.Int64Value(userId), + } + resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...) + if resp.Diagnostics.HasError() { + return + } + err = mapFieldsCreate(userResp, &model, region) if err != nil { core.LogAndAddError( @@ -229,6 +242,51 @@ func (r *userResource) Create( ) return } + + waitResp, err := sqlserverflexbetaWait.CreateUserWaitHandler( + ctx, + r.client, + projectId, + instanceId, + region, + userId, + ).SetSleepBeforeWait( + 90 * time.Second, + ).SetTimeout( + 90 * time.Minute, + ).WaitWithContext(ctx) + + if err != nil { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + "create user", + fmt.Sprintf("Instance creation waiting: %v", err), + ) + return + } + + if waitResp.Id == nil { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + "create user", + "Instance creation waiting: returned id is nil", + ) + return + } + + // Map response body to schema + err = mapFields(waitResp, &model, region) + if err != nil { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + "Error creating user", + fmt.Sprintf("Processing API payload: %v", err), + ) + return + } // Set state to fully populated data diags = resp.State.Set(ctx, model) resp.Diagnostics.Append(diags...) @@ -350,9 +408,29 @@ func (r *userResource) Delete( // Delete existing record set // err := r.client.DeleteUserRequest(ctx, projectId, region, instanceId, userId).Execute() + err := r.client.DeleteUserRequestExecute(ctx, projectId, region, instanceId, userId) + if err != nil { + var oapiErr *oapierror.GenericOpenAPIError + ok := errors.As(err, &oapiErr) + if !ok { + // TODO err handling + return + } + switch oapiErr.StatusCode { + case http.StatusNotFound: + resp.State.RemoveResource(ctx) + return + // case http.StatusInternalServerError: + // tflog.Warn(ctx, "[delete user] Wait handler got error 500") + // return false, nil, nil + default: + // TODO err handling + return + } + } // Delete existing record set - _, err := sqlserverflexbetaWait.DeleteUserWaitHandler(ctx, r.client, projectId, region, instanceId, userId). + _, err = sqlserverflexbetaWait.DeleteUserWaitHandler(ctx, r.client, projectId, region, instanceId, userId). WaitWithContext(ctx) //err := r.client.DeleteUserRequest(ctx, arg.projectId, arg.region, arg.instanceId, userId).Execute() if err != nil { diff --git a/stackit/internal/wait/sqlserverflexbeta/wait.go b/stackit/internal/wait/sqlserverflexbeta/wait.go index 3bef0c64..b6a5bfcd 100644 --- a/stackit/internal/wait/sqlserverflexbeta/wait.go +++ b/stackit/internal/wait/sqlserverflexbeta/wait.go @@ -45,6 +45,22 @@ type APIClientInterface interface { instanceId string, userId int64, ) (*sqlserverflex.GetUserResponse, error) + + ListRolesRequestExecute( + ctx context.Context, + projectId string, + region string, + instanceId string, + ) (*sqlserverflex.ListRolesResponse, error) + + ListUsersRequest(ctx context.Context, projectId string, region string, instanceId string) sqlserverflex.ApiListUsersRequestRequest + + ListUsersRequestExecute( + ctx context.Context, + projectId string, + region string, + instanceId string, + ) (*sqlserverflex.ListUserResponse, error) } // APIClientUserInterface Interface needed for tests @@ -85,6 +101,54 @@ func CreateInstanceWaitHandler( return false, nil, nil } } + + tflog.Info(ctx, "trying to get roles") + time.Sleep(10 * time.Second) + _, rolesErr := a.ListRolesRequestExecute(ctx, projectId, region, instanceId) + if rolesErr != nil { + var oapiErr *oapierror.GenericOpenAPIError + ok := errors.As(rolesErr, &oapiErr) + if !ok { + return false, nil, fmt.Errorf("could not convert error to oapierror.GenericOpenAPIError") + } + if oapiErr.StatusCode != http.StatusInternalServerError { + tflog.Info( + ctx, "got error from api", map[string]interface{}{ + "error": rolesErr.Error(), + }, + ) + return false, nil, rolesErr + } + tflog.Info( + ctx, "wait for get-roles to work hack", map[string]interface{}{}, + ) + time.Sleep(10 * time.Second) + return false, nil, nil + } + + tflog.Info(ctx, "trying to get users") + time.Sleep(10 * time.Second) + _, usersErr := a.ListUsersRequestExecute(ctx, projectId, region, instanceId) + if usersErr != nil { + var oapiErr *oapierror.GenericOpenAPIError + ok := errors.As(usersErr, &oapiErr) + if !ok { + return false, nil, fmt.Errorf("could not convert error to oapierror.GenericOpenAPIError") + } + if oapiErr.StatusCode != http.StatusInternalServerError { + tflog.Info( + ctx, "got error from api", map[string]interface{}{ + "error": rolesErr.Error(), + }, + ) + return false, nil, usersErr + } + tflog.Info( + ctx, "wait for get-users to work hack", map[string]interface{}{}, + ) + time.Sleep(10 * time.Second) + return false, nil, nil + } return true, s, nil case strings.ToLower(InstanceStateUnknown), strings.ToLower(InstanceStateFailed): return true, s, fmt.Errorf("create failed for instance with id %s", instanceId) @@ -94,6 +158,7 @@ func CreateInstanceWaitHandler( "status": *s.Status, }, ) + time.Sleep(10 * time.Second) return false, nil, nil default: tflog.Info( @@ -187,20 +252,96 @@ func CreateDatabaseWaitHandler( func() (waitFinished bool, response *sqlserverflex.GetDatabaseResponse, err error) { s, err := a.GetDatabaseRequestExecute(ctx, projectId, region, instanceId, databaseName) if err != nil { - return false, nil, err - } - if s == nil || s.Name == nil || *s.Name != databaseName { + var oapiErr *oapierror.GenericOpenAPIError + ok := errors.As(err, &oapiErr) + if !ok { + return false, nil, fmt.Errorf("get database - could not convert error to oapierror.GenericOpenAPIError: %s", err.Error()) + } + if oapiErr.StatusCode != http.StatusNotFound { + return false, nil, err + } return false, nil, nil } - var oapiErr *oapierror.GenericOpenAPIError - ok := errors.As(err, &oapiErr) + if s == nil || s.Name == nil || *s.Name != databaseName { + return false, nil, errors.New("response did return different result") + } + return true, s, nil + }, + ) + return handler +} + +// CreateUserWaitHandler will wait for instance creation +func CreateUserWaitHandler( + ctx context.Context, + a APIClientInterface, + projectId, instanceId, region string, + userId int64, +) *wait.AsyncActionHandler[sqlserverflex.GetUserResponse] { + handler := wait.New( + func() (waitFinished bool, response *sqlserverflex.GetUserResponse, err error) { + s, err := a.GetUserRequestExecute(ctx, projectId, region, instanceId, userId) + if err != nil { + var oapiErr *oapierror.GenericOpenAPIError + ok := errors.As(err, &oapiErr) + if !ok { + return false, nil, fmt.Errorf("could not convert error to oapierror.GenericOpenAPIError") + } + if oapiErr.StatusCode != http.StatusNotFound { + return false, nil, err + } + return false, nil, nil + } + return true, s, nil + }, + ) + return handler +} + +// WaitForUserWaitHandler will wait for instance creation +func WaitForUserWaitHandler( + ctx context.Context, + a APIClientInterface, + projectId, instanceId, region, userName string, +) *wait.AsyncActionHandler[sqlserverflex.ListUserResponse] { + startTime := time.Now() + timeOut := 2 * time.Minute + + handler := wait.New( + func() (waitFinished bool, response *sqlserverflex.ListUserResponse, err error) { + if time.Since(startTime) > timeOut { + return false, nil, errors.New("ran into timeout") + } + s, err := a.ListUsersRequest(ctx, projectId, region, instanceId).Size(100).Execute() + if err != nil { + var oapiErr *oapierror.GenericOpenAPIError + ok := errors.As(err, &oapiErr) + if !ok { + return false, nil, fmt.Errorf("Wait (list users) could not convert error to oapierror.GenericOpenAPIError: %s", err.Error()) + } + if oapiErr.StatusCode != http.StatusNotFound { + return false, nil, err + } + tflog.Info( + ctx, "Wait (list users) still waiting", map[string]interface{}{}, + ) + + return false, nil, nil + } + users, ok := s.GetUsersOk() if !ok { - return false, nil, fmt.Errorf("could not convert error to oapierror.GenericOpenAPIError") + return false, nil, errors.New("no users found") } - if oapiErr.StatusCode != http.StatusNotFound { - return false, nil, err + + for _, u := range users { + if u.GetUsername() == userName { + return true, s, nil + } } - return true, nil, nil + tflog.Info( + ctx, "Wait (list users) user still not present", map[string]interface{}{}, + ) + return false, nil, nil }, ) return handler @@ -209,13 +350,13 @@ func CreateDatabaseWaitHandler( // DeleteUserWaitHandler will wait for instance deletion func DeleteUserWaitHandler( ctx context.Context, - a APIClientUserInterface, - projectId, instanceId, region string, + a APIClientInterface, + projectId, region, instanceId string, userId int64, ) *wait.AsyncActionHandler[struct{}] { handler := wait.New( func() (waitFinished bool, response *struct{}, err error) { - err = a.DeleteUserRequestExecute(ctx, projectId, region, instanceId, userId) + _, err = a.GetUserRequestExecute(ctx, projectId, region, instanceId, userId) if err == nil { return false, nil, nil } @@ -228,9 +369,6 @@ func DeleteUserWaitHandler( switch oapiErr.StatusCode { case http.StatusNotFound: return true, nil, nil - case http.StatusInternalServerError: - tflog.Warn(ctx, "Wait handler got error 500") - return false, nil, nil default: return false, nil, err } From 82c654f3ba07f1b422e02e00e45a341813e35bb0 Mon Sep 17 00:00:00 2001 From: "Marcel S. Henselin" Date: Thu, 12 Feb 2026 12:03:14 +0000 Subject: [PATCH 20/41] fix: publisher - create versions file correctly (#52) ## Description relates to #1234 ## Checklist - [ ] Issue was linked above - [ ] Code format was applied: `make fmt` - [ ] Examples were added / adjusted (see `examples/` directory) - [x] Docs are up-to-date: `make generate-docs` (will be checked by CI) - [ ] Unit tests got implemented or updated - [ ] Acceptance tests got implemented or updated (see e.g. [here](https://github.com/stackitcloud/terraform-provider-stackit/blob/f5f99d170996b208672ae684b6da53420e369563/stackit/internal/services/dns/dns_acc_test.go)) - [x] Unit tests are passing: `make test` (will be checked by CI) - [x] No linter issues: `make lint` (will be checked by CI) Reviewed-on: https://tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pulls/52 Co-authored-by: Marcel S. Henselin Co-committed-by: Marcel S. Henselin --- cmd/cmd/publish/provider.go | 2 +- cmd/cmd/publish/versions.go | 4 ++++ 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/cmd/cmd/publish/provider.go b/cmd/cmd/publish/provider.go index d581fbe3..36d7af1d 100644 --- a/cmd/cmd/publish/provider.go +++ b/cmd/cmd/publish/provider.go @@ -143,7 +143,7 @@ func (p *Provider) createVersionsFile() error { // Build the versions file... version := Version{ Version: p.Version, - Protocols: []string{"5.1", "6.0"}, + Protocols: []string{"5.1", "6.1"}, Platforms: nil, } for _, sum := range shasums { diff --git a/cmd/cmd/publish/versions.go b/cmd/cmd/publish/versions.go index 4145612a..397afa15 100644 --- a/cmd/cmd/publish/versions.go +++ b/cmd/cmd/publish/versions.go @@ -22,10 +22,14 @@ type Platform struct { } type Data struct { + Id string `json:"id,omitempty"` Versions []Version `json:"versions"` } func (d *Data) WriteToFile(filePath string) error { + // TODO: make it variable + d.Id = "tfregistry.sysops.stackit.rocks/mhenselin/stackitprivatepreview" + jsonString, err := json.Marshal(d) if err != nil { return fmt.Errorf("error encoding data: %w", err) From 459120d3b3042279b3bbebe2e0a5312ac1aad422 Mon Sep 17 00:00:00 2001 From: "Marcel S. Henselin" Date: Thu, 12 Feb 2026 15:14:53 +0000 Subject: [PATCH 21/41] fix: sqlserver return values mapping (#53) ## Description relates to #1234 ## Checklist - [ ] Issue was linked above - [ ] Code format was applied: `make fmt` - [ ] Examples were added / adjusted (see `examples/` directory) - [x] Docs are up-to-date: `make generate-docs` (will be checked by CI) - [ ] Unit tests got implemented or updated - [ ] Acceptance tests got implemented or updated (see e.g. [here](https://github.com/stackitcloud/terraform-provider-stackit/blob/f5f99d170996b208672ae684b6da53420e369563/stackit/internal/services/dns/dns_acc_test.go)) - [x] Unit tests are passing: `make test` (will be checked by CI) - [x] No linter issues: `make lint` (will be checked by CI) Reviewed-on: https://tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pulls/53 Co-authored-by: Marcel S. Henselin Co-committed-by: Marcel S. Henselin --- .../sqlserverflexbeta/database/mapper.go | 3 + .../sqlserverflexbeta/database/resource.go | 15 +- .../sqlserverflex_acc_test.go | 240 ++++++++++++------ .../testdata/instance_template.gompl | 12 +- 4 files changed, 188 insertions(+), 82 deletions(-) diff --git a/stackit/internal/services/sqlserverflexbeta/database/mapper.go b/stackit/internal/services/sqlserverflexbeta/database/mapper.go index 349f7717..7d82039a 100644 --- a/stackit/internal/services/sqlserverflexbeta/database/mapper.go +++ b/stackit/internal/services/sqlserverflexbeta/database/mapper.go @@ -79,7 +79,10 @@ func mapResourceFields(source *sqlserverflexbeta.GetDatabaseResponse, model *res model.ProjectId = types.StringValue(model.ProjectId.ValueString()) model.InstanceId = types.StringValue(model.InstanceId.ValueString()) + model.Compatibility = types.Int64Value(source.GetCompatibilityLevel()) model.CompatibilityLevel = types.Int64Value(source.GetCompatibilityLevel()) + + model.Collation = types.StringValue(source.GetCollationName()) // it does not come back from api model.CollationName = types.StringValue(source.GetCollationName()) return nil diff --git a/stackit/internal/services/sqlserverflexbeta/database/resource.go b/stackit/internal/services/sqlserverflexbeta/database/resource.go index 652575fc..7c191e42 100644 --- a/stackit/internal/services/sqlserverflexbeta/database/resource.go +++ b/stackit/internal/services/sqlserverflexbeta/database/resource.go @@ -174,13 +174,18 @@ func (r *databaseResource) Create(ctx context.Context, req resource.CreateReques databaseName := data.Name.ValueString() ctx = tflog.SetField(ctx, "database_name", databaseName) - payLoad := sqlserverflexbeta.CreateDatabaseRequestPayload{ - Collation: data.Collation.ValueStringPointer(), - Compatibility: data.Compatibility.ValueInt64Pointer(), - Name: data.Name.ValueStringPointer(), - Owner: data.Owner.ValueStringPointer(), + payLoad := sqlserverflexbeta.CreateDatabaseRequestPayload{} + if !data.Collation.IsNull() && !data.Collation.IsUnknown() { + payLoad.Collation = data.Collation.ValueStringPointer() } + if !data.Compatibility.IsNull() && !data.Compatibility.IsUnknown() { + payLoad.Compatibility = data.Compatibility.ValueInt64Pointer() + } + + payLoad.Name = data.Name.ValueStringPointer() + payLoad.Owner = data.Owner.ValueStringPointer() + _, err := wait.WaitForUserWaitHandler( ctx, r.client, diff --git a/stackit/internal/services/sqlserverflexbeta/sqlserverflex_acc_test.go b/stackit/internal/services/sqlserverflexbeta/sqlserverflex_acc_test.go index 9c091cde..e6c776dc 100644 --- a/stackit/internal/services/sqlserverflexbeta/sqlserverflex_acc_test.go +++ b/stackit/internal/services/sqlserverflexbeta/sqlserverflex_acc_test.go @@ -10,6 +10,7 @@ import ( "github.com/hashicorp/terraform-plugin-testing/helper/acctest" "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/terraform" sqlserverflexbeta "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/instance" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/internal/testutils" @@ -19,6 +20,8 @@ import ( fwresource "github.com/hashicorp/terraform-plugin-framework/resource" ) +const providerPrefix = "stackitprivatepreview_sqlserverflexbeta" + func TestInstanceResourceSchema(t *testing.T) { t.Parallel() @@ -84,9 +87,15 @@ type User struct { } type Database struct { - Name string - ProjectId string - Owner string + Name string + ProjectId string + Owner string + Collation string + Compatibility string +} + +func resName(res, name string) string { + return fmt.Sprintf("%s_%s.%s", providerPrefix, res, name) } func getExample() resData { @@ -111,10 +120,6 @@ func getExample() resData { func TestAccInstance(t *testing.T) { exData := getExample() - resName := fmt.Sprintf( - "stackitprivatepreview_sqlserverflexbeta_instance.%s", - exData.TfName, - ) updNameData := exData updNameData.Name = "name-updated" @@ -133,8 +138,8 @@ func TestAccInstance(t *testing.T) { exData, ), Check: resource.ComposeAggregateTestCheckFunc( - resource.TestCheckResourceAttr(resName, "name", exData.Name), - resource.TestCheckResourceAttrSet(resName, "id"), + resource.TestCheckResourceAttr(resName("instance", exData.TfName), "name", exData.Name), + resource.TestCheckResourceAttrSet(resName("instance", exData.TfName), "id"), ), }, // Update name and verify @@ -144,7 +149,7 @@ func TestAccInstance(t *testing.T) { updNameData, ), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(resName, "name", updNameData.Name), + resource.TestCheckResourceAttr(resName("instance", exData.TfName), "name", updNameData.Name), ), }, // Update size and verify @@ -155,12 +160,15 @@ func TestAccInstance(t *testing.T) { ), Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttr( - resName, + resName("instance", exData.TfName), "storage.size", strconv.Itoa(int(updSizeData.Size)), ), ), }, + { + RefreshState: true, + }, //// Import test //{ // ResourceName: "example_resource.test", @@ -171,49 +179,7 @@ func TestAccInstance(t *testing.T) { }) } -func TestAccInstanceWithUsers(t *testing.T) { - data := getExample() - userName := "testUser" - data.Users = []User{ - { - Name: userName, - ProjectId: os.Getenv("TF_ACC_PROJECT_ID"), - Roles: []string{"##STACKIT_LoginManager##", "##STACKIT_DatabaseManager##"}, - }, - } - - resName := fmt.Sprintf( - "stackitprivatepreview_sqlserverflexbeta_instance.%s", - data.TfName, - ) - - resUserName := fmt.Sprintf( - "stackitprivatepreview_sqlserverflexbeta_user.%s", - userName, - ) - - resource.Test(t, resource.TestCase{ - PreCheck: func() { testAccPreCheck(t) }, - ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories, - Steps: []resource.TestStep{ - // Create and verify - { - Config: testutils.StringFromTemplateMust( - "testdata/instance_template.gompl", - data, - ), - Check: resource.ComposeAggregateTestCheckFunc( - resource.TestCheckResourceAttr(resName, "name", data.Name), - resource.TestCheckResourceAttrSet(resName, "id"), - resource.TestCheckResourceAttr(resUserName, "name", userName), - resource.TestCheckResourceAttrSet(resUserName, "id"), - ), - }, - }, - }) -} - -func TestAccInstanceWithDatabases(t *testing.T) { +func TestAccInstanceNoEncryption(t *testing.T) { data := getExample() t.Logf(" ... working on instance %s", data.TfName) dbName := "testDb" @@ -233,20 +199,104 @@ func TestAccInstanceWithDatabases(t *testing.T) { }, } - resName := fmt.Sprintf( - "stackitprivatepreview_sqlserverflexbeta_instance.%s", - data.TfName, - ) + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories, + Steps: []resource.TestStep{ + // Create and verify + { + Config: testutils.StringFromTemplateMust( + "testdata/instance_template.gompl", + data, + ), + Check: resource.ComposeAggregateTestCheckFunc( + // check instance values are set + resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "id"), + resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "backup_schedule"), + resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "edition"), + resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "flavor_id"), + resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "instance_id"), + resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "is_deletable"), + resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "name"), + resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "replicas"), + resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "retention_days"), + resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "status"), + resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "version"), - resUserName := fmt.Sprintf( - "stackitprivatepreview_sqlserverflexbeta_user.%s", - userName, - ) + resource.TestCheckNoResourceAttr(resName("instance", data.TfName), "encryption"), - resDbName := fmt.Sprintf( - "stackitprivatepreview_sqlserverflexbeta_database.%s", - dbName, - ) + //resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption"), + //resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_id"), + //resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_version"), + //resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_ring_id"), + //resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.service_account"), + + //resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.access_scope"), + //resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.acl"), + + //resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.instance_address"), + //resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.router_address"), + + //resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "storage.class"), + //resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "storage.size"), + + // check instance values are correct + resource.TestCheckResourceAttr(resName("instance", data.TfName), "name", data.Name), + + // check user values are set + resource.TestCheckResourceAttrSet(resName("user", userName), "id"), + resource.TestCheckResourceAttrSet(resName("user", userName), "username"), + // resource.TestCheckResourceAttrSet(resName("user", userName), "roles"), + + func(s *terraform.State) error { + return nil + }, + + // check user values are correct + resource.TestCheckResourceAttr(resName("user", userName), "username", userName), + resource.TestCheckResourceAttr(resName("user", userName), "roles.#", "2"), + + // check database values are set + resource.TestCheckResourceAttrSet(resName("database", dbName), "id"), + resource.TestCheckResourceAttrSet(resName("database", dbName), "name"), + resource.TestCheckResourceAttrSet(resName("database", dbName), "owner"), + resource.TestCheckResourceAttrSet(resName("database", dbName), "compatibility"), + resource.TestCheckResourceAttrSet(resName("database", dbName), "collation"), + + // check database values are correct + resource.TestCheckResourceAttr(resName("database", dbName), "name", dbName), + resource.TestCheckResourceAttr(resName("database", dbName), "owner", userName), + ), + }, + }, + }) +} + +func TestAccInstanceEncryption(t *testing.T) { + data := getExample() + t.Logf(" ... working on instance %s", data.TfName) + dbName := "testDb" + userName := "testUser" + data.Users = []User{ + { + Name: userName, + ProjectId: os.Getenv("TF_ACC_PROJECT_ID"), + Roles: []string{"##STACKIT_LoginManager##", "##STACKIT_DatabaseManager##"}, + }, + } + data.Databases = []Database{ + { + Name: dbName, + ProjectId: os.Getenv("TF_ACC_PROJECT_ID"), + Owner: userName, + }, + } + + data.UseEncryption = true + data.KekKeyId = "fe039bcf-8d7b-431a-801d-9e81371a6b7b" + data.KekKeyRingId = "6a2d95ab-3c4c-4963-a2bb-08d17a320e27" + data.KekKeyVersion = 1 + data.KekServiceAccount = "henselinm-u2v3ex1@sa.stackit.cloud" resource.Test(t, resource.TestCase{ PreCheck: func() { testAccPreCheck(t) }, @@ -259,15 +309,59 @@ func TestAccInstanceWithDatabases(t *testing.T) { data, ), Check: resource.ComposeAggregateTestCheckFunc( - resource.TestCheckResourceAttrSet(resName, "id"), - resource.TestCheckResourceAttr(resName, "name", data.Name), + // check instance values are set + resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "id"), + resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "backup_schedule"), + resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "edition"), + resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "flavor_id"), + resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "instance_id"), + resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "is_deletable"), + resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "name"), + resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "replicas"), + resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "retention_days"), + resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "status"), + resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "version"), - resource.TestCheckResourceAttrSet(resUserName, "id"), - resource.TestCheckResourceAttr(resUserName, "username", userName), + //resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption"), + //resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_id"), + //resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_version"), + //resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_ring_id"), + //resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.service_account"), - resource.TestCheckResourceAttrSet(resDbName, "id"), - resource.TestCheckResourceAttr(resDbName, "name", dbName), - resource.TestCheckResourceAttr(resDbName, "owner", userName), + //resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.access_scope"), + //resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.acl"), + + //resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.instance_address"), + //resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.router_address"), + + //resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "storage.class"), + //resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "storage.size"), + + // check instance values are correct + resource.TestCheckResourceAttr(resName("instance", data.TfName), "name", data.Name), + + // check user values are set + resource.TestCheckResourceAttrSet(resName("user", userName), "id"), + resource.TestCheckResourceAttrSet(resName("user", userName), "username"), + + func(s *terraform.State) error { + return nil + }, + + // check user values are correct + resource.TestCheckResourceAttr(resName("user", userName), "username", userName), + resource.TestCheckResourceAttr(resName("user", userName), "roles.#", "2"), + + // check database values are set + resource.TestCheckResourceAttrSet(resName("database", dbName), "id"), + resource.TestCheckResourceAttrSet(resName("database", dbName), "name"), + resource.TestCheckResourceAttrSet(resName("database", dbName), "owner"), + resource.TestCheckResourceAttrSet(resName("database", dbName), "compatibility"), + resource.TestCheckResourceAttrSet(resName("database", dbName), "collation"), + + // check database values are correct + resource.TestCheckResourceAttr(resName("database", dbName), "name", dbName), + resource.TestCheckResourceAttr(resName("database", dbName), "owner", userName), ), }, }, diff --git a/stackit/internal/services/sqlserverflexbeta/testdata/instance_template.gompl b/stackit/internal/services/sqlserverflexbeta/testdata/instance_template.gompl index 84ca19c1..e71f3fa0 100644 --- a/stackit/internal/services/sqlserverflexbeta/testdata/instance_template.gompl +++ b/stackit/internal/services/sqlserverflexbeta/testdata/instance_template.gompl @@ -15,8 +15,8 @@ resource "stackitprivatepreview_sqlserverflexbeta_instance" "{{ .TfName }}" { } {{ if .UseEncryption }} encryption = { - kek_key_id = {{ .KekKeyId }} - kek_key_ring_id = {{ .KekKeyRingId }} + kek_key_id = "{{ .KekKeyId }}" + kek_key_ring_id = "{{ .KekKeyRingId }}" kek_key_version = {{ .KekKeyVersion }} service_account = "{{ .KekServiceAccount }}" } @@ -49,8 +49,12 @@ resource "stackitprivatepreview_sqlserverflexbeta_database" "{{ $db.Name }}" { instance_id = stackitprivatepreview_sqlserverflexbeta_instance.{{ $tfName }}.instance_id name = "{{ $db.Name }}" owner = "{{ $db.Owner }}" - collation = "Albanian_BIN" - compatibility = "160" +{{ if $db.Collation }} + collation = "{{ $db.Collation }}" +{{ end }} +{{ if $db.Compatibility }} + compatibility = "{{ $db.Compatibility }}" +{{ end }} } {{ end }} {{ end }} From 10af1dbbba39db9275537ad9904616c4172c4f4c Mon Sep 17 00:00:00 2001 From: "Marcel S. Henselin" Date: Fri, 13 Feb 2026 08:15:21 +0000 Subject: [PATCH 22/41] fix: postgres_fixes (#54) ## Description relates to #1234 ## Checklist - [ ] Issue was linked above - [ ] Code format was applied: `make fmt` - [ ] Examples were added / adjusted (see `examples/` directory) - [x] Docs are up-to-date: `make generate-docs` (will be checked by CI) - [ ] Unit tests got implemented or updated - [ ] Acceptance tests got implemented or updated (see e.g. [here](https://github.com/stackitcloud/terraform-provider-stackit/blob/f5f99d170996b208672ae684b6da53420e369563/stackit/internal/services/dns/dns_acc_test.go)) - [x] Unit tests are passing: `make test` (will be checked by CI) - [x] No linter issues: `make lint` (will be checked by CI) Reviewed-on: https://tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pulls/54 Co-authored-by: Marcel S. Henselin Co-committed-by: Marcel S. Henselin --- internal/testutils/functions.go | 4 + .../postgresflexalpha/database/mapper.go | 4 +- .../postgresflexalpha/database/resource.go | 39 +-- .../postgresflex_acc_test.go | 26 +- .../services/postgresflexalpha/user/mapper.go | 5 +- .../postgresflexalpha/user/resource.go | 222 ++++++++++-------- .../database/planModifiers.yaml | 3 +- .../sqlserverflex_acc_test.go | 1 + .../sqlserverflexbeta/user/planModifiers.yaml | 6 +- .../internal/wait/postgresflexalpha/wait.go | 72 +++++- 10 files changed, 242 insertions(+), 140 deletions(-) diff --git a/internal/testutils/functions.go b/internal/testutils/functions.go index e672d57c..0006751a 100644 --- a/internal/testutils/functions.go +++ b/internal/testutils/functions.go @@ -123,3 +123,7 @@ func StringFromTemplate(tplFile string, data any) (string, error) { return tplBuf.String(), nil } + +func ResStr(prefix, resource, name string) string { + return fmt.Sprintf("%s_%s.%s", prefix, resource, name) +} diff --git a/stackit/internal/services/postgresflexalpha/database/mapper.go b/stackit/internal/services/postgresflexalpha/database/mapper.go index 5785f4b7..9e38fe3f 100644 --- a/stackit/internal/services/postgresflexalpha/database/mapper.go +++ b/stackit/internal/services/postgresflexalpha/database/mapper.go @@ -51,8 +51,8 @@ func mapFields( return nil } -// mapResourceFields maps fields from a ListDatabase API response to a resourceModel for the resource. -func mapResourceFields(source *postgresflexalpha.ListDatabase, model *resourceModel) error { +// mapResourceFields maps fields from a GetDatabase API response to a resourceModel for the resource. +func mapResourceFields(source *postgresflexalpha.GetDatabaseResponse, model *resourceModel) error { if source == nil { return fmt.Errorf("response is nil") } diff --git a/stackit/internal/services/postgresflexalpha/database/resource.go b/stackit/internal/services/postgresflexalpha/database/resource.go index e2013f29..530bb98c 100644 --- a/stackit/internal/services/postgresflexalpha/database/resource.go +++ b/stackit/internal/services/postgresflexalpha/database/resource.go @@ -6,22 +6,22 @@ import ( "errors" "fmt" "math" - "net/http" "strconv" "strings" + "time" "github.com/hashicorp/terraform-plugin-framework/path" "github.com/hashicorp/terraform-plugin-framework/resource" "github.com/hashicorp/terraform-plugin-framework/resource/identityschema" "github.com/hashicorp/terraform-plugin-framework/types" "github.com/hashicorp/terraform-plugin-log/tflog" - "github.com/stackitcloud/stackit-sdk-go/core/oapierror" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" postgresflexalpha2 "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/database/resources_gen" postgresflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/utils" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" + postgresflexalpha3 "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/wait/postgresflexalpha" ) var ( @@ -236,7 +236,10 @@ func (r *databaseResource) Create( return } - database, err := getDatabaseById(ctx, r.client, projectId, region, instanceId, databaseId) + database, err := postgresflexalpha3.GetDatabaseByIdWaitHandler(ctx, r.client, projectId, region, instanceId, databaseId). + SetTimeout(15 * time.Minute). + SetSleepBeforeWait(15 * time.Second). + WaitWithContext(ctx) if err != nil { core.LogAndAddError( ctx, @@ -304,14 +307,17 @@ func (r *databaseResource) Read( ctx = tflog.SetField(ctx, "region", region) ctx = tflog.SetField(ctx, "database_id", databaseId) - databaseResp, err := getDatabaseById(ctx, r.client, projectId, region, instanceId, databaseId) + databaseResp, err := postgresflexalpha3.GetDatabaseByIdWaitHandler(ctx, r.client, projectId, region, instanceId, databaseId). + SetTimeout(15 * time.Minute). + SetSleepBeforeWait(15 * time.Second). + WaitWithContext(ctx) if err != nil { - oapiErr, ok := err.(*oapierror.GenericOpenAPIError) //nolint:errorlint //complaining that error.As should be used to catch wrapped errors, but this error should not be wrapped - if (ok && oapiErr.StatusCode == http.StatusNotFound) || errors.Is(err, errDatabaseNotFound) { - resp.State.RemoveResource(ctx) - return - } - core.LogAndAddError(ctx, &resp.Diagnostics, "Error reading database", fmt.Sprintf("Calling API: %v", err)) + core.LogAndAddError( + ctx, + &resp.Diagnostics, + "Error creating database", + fmt.Sprintf("Getting database details after creation: %v", err), + ) return } @@ -433,15 +439,12 @@ func (r *databaseResource) Update( ctx = core.LogResponse(ctx) - // Map response body to schema - databaseResp, err := getDatabaseById(ctx, r.client, projectId, region, instanceId, databaseId64) + databaseResp, err := postgresflexalpha3.GetDatabaseByIdWaitHandler(ctx, r.client, projectId, region, instanceId, databaseId64). + SetTimeout(15 * time.Minute). + SetSleepBeforeWait(15 * time.Second). + WaitWithContext(ctx) if err != nil { - oapiErr, ok := err.(*oapierror.GenericOpenAPIError) //nolint:errorlint //complaining that error.As should be used to catch wrapped errors, but this error should not be wrapped - if (ok && oapiErr.StatusCode == http.StatusNotFound) || errors.Is(err, errDatabaseNotFound) { - resp.State.RemoveResource(ctx) - return - } - core.LogAndAddError(ctx, &resp.Diagnostics, "Error reading database", fmt.Sprintf("Calling API: %v", err)) + core.LogAndAddError(ctx, &resp.Diagnostics, "error updating database", err.Error()) return } diff --git a/stackit/internal/services/postgresflexalpha/postgresflex_acc_test.go b/stackit/internal/services/postgresflexalpha/postgresflex_acc_test.go index 29facfee..7ec8c4f0 100644 --- a/stackit/internal/services/postgresflexalpha/postgresflex_acc_test.go +++ b/stackit/internal/services/postgresflexalpha/postgresflex_acc_test.go @@ -19,6 +19,8 @@ import ( fwresource "github.com/hashicorp/terraform-plugin-framework/resource" ) +const pfx = "stackitprivatepreview_postgresflexalpha" + func TestInstanceResourceSchema(t *testing.T) { t.Parallel() @@ -149,6 +151,8 @@ func getExample() resData { func TestAccInstance(t *testing.T) { exData := getExample() + t.Logf(" ... working on instance %s", exData.TfName) + resName := fmt.Sprintf( "stackitprivatepreview_postgresflexalpha_instance.%s", exData.TfName, @@ -211,6 +215,8 @@ func TestAccInstance(t *testing.T) { func TestAccInstanceWithUsers(t *testing.T) { data := getExample() + t.Logf(" ... working on instance %s", data.TfName) + userName := "testUser" data.Users = []User{ { @@ -220,16 +226,6 @@ func TestAccInstanceWithUsers(t *testing.T) { }, } - resName := fmt.Sprintf( - "stackitprivatepreview_postgresflexalpha_instance.%s", - data.TfName, - ) - - resUserName := fmt.Sprintf( - "stackitprivatepreview_postgresflexalpha_user.%s", - userName, - ) - resource.Test(t, resource.TestCase{ PreCheck: func() { testAccPreCheck(t) }, ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories, @@ -241,10 +237,10 @@ func TestAccInstanceWithUsers(t *testing.T) { data, ), Check: resource.ComposeAggregateTestCheckFunc( - resource.TestCheckResourceAttr(resName, "name", data.Name), - resource.TestCheckResourceAttrSet(resName, "id"), - resource.TestCheckResourceAttr(resUserName, "name", userName), - resource.TestCheckResourceAttrSet(resUserName, "id"), + resource.TestCheckResourceAttr(testutils.ResStr(pfx, "instance", data.TfName), "name", data.Name), + resource.TestCheckResourceAttrSet(testutils.ResStr(pfx, "instance", data.TfName), "id"), + resource.TestCheckResourceAttr(testutils.ResStr(pfx, "user", userName), "name", userName), + resource.TestCheckResourceAttrSet(testutils.ResStr(pfx, "user", userName), "id"), ), }, }, @@ -253,6 +249,8 @@ func TestAccInstanceWithUsers(t *testing.T) { func TestAccInstanceWithDatabases(t *testing.T) { data := getExample() + t.Logf(" ... working on instance %s", data.TfName) + dbName := "testDb" userName := "testUser" data.Users = []User{ diff --git a/stackit/internal/services/postgresflexalpha/user/mapper.go b/stackit/internal/services/postgresflexalpha/user/mapper.go index 37c8fc1f..8fb4278f 100644 --- a/stackit/internal/services/postgresflexalpha/user/mapper.go +++ b/stackit/internal/services/postgresflexalpha/user/mapper.go @@ -7,7 +7,6 @@ import ( "github.com/hashicorp/terraform-plugin-framework/attr" "github.com/hashicorp/terraform-plugin-framework/types" postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha" - "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" ) @@ -80,7 +79,7 @@ func toUpdatePayload(model *resourceModel, roles *[]string) ( } return &postgresflex.UpdateUserRequestPayload{ - Name: conversion.StringValueToPointer(model.Name), + Name: model.Name.ValueStringPointer(), Roles: toPayloadRoles(roles), }, nil } @@ -96,7 +95,7 @@ func toCreatePayload(model *resourceModel, roles *[]string) (*postgresflex.Creat return &postgresflex.CreateUserRequestPayload{ Roles: toPayloadRoles(roles), - Name: conversion.StringValueToPointer(model.Name), + Name: model.Name.ValueStringPointer(), }, nil } diff --git a/stackit/internal/services/postgresflexalpha/user/resource.go b/stackit/internal/services/postgresflexalpha/user/resource.go index 8984a4ea..c6cad445 100644 --- a/stackit/internal/services/postgresflexalpha/user/resource.go +++ b/stackit/internal/services/postgresflexalpha/user/resource.go @@ -3,24 +3,23 @@ package postgresflexalpha import ( "context" _ "embed" - "errors" "fmt" "math" - "net/http" "strconv" "strings" + "time" "github.com/hashicorp/terraform-plugin-framework/diag" "github.com/hashicorp/terraform-plugin-framework/resource/identityschema" postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha" postgresflexalpha "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/user/resources_gen" postgresflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/utils" + postgresflexalphaWait "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/wait/postgresflexalpha" "github.com/hashicorp/terraform-plugin-framework/path" "github.com/hashicorp/terraform-plugin-framework/resource" "github.com/hashicorp/terraform-plugin-framework/types" "github.com/hashicorp/terraform-plugin-log/tflog" - "github.com/stackitcloud/stackit-sdk-go/core/oapierror" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" @@ -52,7 +51,7 @@ type UserResourceIdentityModel struct { ProjectID types.String `tfsdk:"project_id"` Region types.String `tfsdk:"region"` InstanceID types.String `tfsdk:"instance_id"` - UserID types.Int64 `tfsdk:"database_id"` + UserID types.Int64 `tfsdk:"user_id"` } // userResource implements the resource handling for a PostgreSQL Flex user. @@ -150,13 +149,6 @@ func (r *userResource) Create( return } - // Read identity data - var identityData UserResourceIdentityModel - resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...) - if resp.Diagnostics.HasError() { - return - } - ctx = core.InitProviderContext(ctx) arg := &clientArg{ @@ -178,6 +170,7 @@ func (r *userResource) Create( core.LogAndAddError(ctx, &resp.Diagnostics, "Error creating user", fmt.Sprintf("Creating API payload: %v", err)) return } + // Create new user userResp, err := r.client.CreateUserRequest( ctx, @@ -185,13 +178,13 @@ func (r *userResource) Create( arg.region, arg.instanceId, ).CreateUserRequestPayload(*payload).Execute() - if err != nil { core.LogAndAddError(ctx, &resp.Diagnostics, "Error creating user", fmt.Sprintf("Calling API: %v", err)) return } - if userResp.Id == nil || *userResp.Id == 0 { + id, ok := userResp.GetIdOk() + if !ok || id == 0 { core.LogAndAddError( ctx, &resp.Diagnostics, @@ -200,13 +193,9 @@ func (r *userResource) Create( ) return } - model.Id = types.Int64Value(userResp.GetId()) - model.UserId = types.Int64Value(userResp.GetId()) - model.Password = types.StringValue(userResp.GetPassword()) - model.Status = types.StringValue(userResp.GetStatus()) - model.ConnectionString = types.StringValue(userResp.GetConnectionString()) + arg.userId = id - ctx = tflog.SetField(ctx, "user_id", userResp.GetId()) + ctx = tflog.SetField(ctx, "user_id", id) ctx = core.LogResponse(ctx) @@ -215,28 +204,65 @@ func (r *userResource) Create( ProjectID: types.StringValue(arg.projectId), Region: types.StringValue(arg.region), InstanceID: types.StringValue(arg.instanceId), - UserID: types.Int64Value(userResp.GetId()), + UserID: types.Int64Value(id), } resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...) if resp.Diagnostics.HasError() { return } - // Verify creation - exists, err := r.getUserResource(ctx, &model, arg) + model.Id = types.Int64Value(id) + model.UserId = types.Int64Value(id) + model.Password = types.StringValue(userResp.GetPassword()) + model.Status = types.StringValue(userResp.GetStatus()) + model.ConnectionString = types.StringValue(userResp.GetConnectionString()) + + waitResp, err := postgresflexalphaWait.GetUserByIdWaitHandler( + ctx, + r.client, + arg.projectId, + arg.instanceId, + arg.region, + id, + ).SetSleepBeforeWait( + 10 * time.Second, + ).SetTimeout( + 15 * time.Minute, + ).WaitWithContext(ctx) if err != nil { - core.LogAndAddError(ctx, &resp.Diagnostics, "Error creating user", fmt.Sprintf("Calling API: %v", err)) - return - } - - if !exists { core.LogAndAddError( - ctx, &resp.Diagnostics, "Error creating user", - fmt.Sprintf("User ID '%v' resource not found after creation", model.UserId.ValueInt64()), + ctx, + &resp.Diagnostics, + "create user", + fmt.Sprintf("Instance creation waiting: %v", err), ) return } + + if waitResp.Id == nil { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + "create user", + "Instance creation waiting: returned id is nil", + ) + return + } + if waitResp.Id == nil || *waitResp.Id != id { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + "create user", + fmt.Sprintf( + "Instance creation waiting: returned id is wrong: %+v - %+v", + waitResp.Id, + id, + ), + ) + return + } + // Set state to fully populated data diags = resp.State.Set(ctx, model) resp.Diagnostics.Append(diags...) @@ -272,17 +298,39 @@ func (r *userResource) Read( ctx = core.InitProviderContext(ctx) // Read resource state - exists, err := r.getUserResource(ctx, &model, arg) + waitResp, err := postgresflexalphaWait.GetUserByIdWaitHandler( + ctx, + r.client, + arg.projectId, + arg.instanceId, + arg.region, + model.UserId.ValueInt64(), + ).SetSleepBeforeWait( + 10 * time.Second, + ).SetTimeout( + 15 * time.Minute, + ).WaitWithContext(ctx) if err != nil { - core.LogAndAddError(ctx, &resp.Diagnostics, "Error reading user", fmt.Sprintf("Calling API: %v", err)) + core.LogAndAddError( + ctx, + &resp.Diagnostics, + "read user", + fmt.Sprintf("Instance creation waiting: %v", err), + ) return } - if !exists { - resp.State.RemoveResource(ctx) + if waitResp.Id == nil || *waitResp.Id != model.UserId.ValueInt64() { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + "read user", + "Instance creation waiting: returned id is nil or wrong", + ) return } + arg.userId = *waitResp.Id ctx = core.LogResponse(ctx) @@ -320,23 +368,12 @@ func (r *userResource) Update( return } - // Read identity data - var identityData UserResourceIdentityModel - resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...) - if resp.Diagnostics.HasError() { - return - } - ctx = core.InitProviderContext(ctx) - arg, errExt := r.extractIdentityData(model, identityData) - if errExt != nil { - core.LogAndAddError( - ctx, - &resp.Diagnostics, - extractErrorSummary, - fmt.Sprintf(extractErrorMessage, errExt), - ) + arg := &clientArg{ + projectId: model.ProjectId.ValueString(), + instanceId: model.InstanceId.ValueString(), + region: r.providerData.GetRegionWithOverride(model.Region), } ctx = r.setTFLogFields(ctx, arg) @@ -397,21 +434,40 @@ func (r *userResource) Update( } // Verify update - exists, err := r.getUserResource(ctx, &stateModel, arg) + waitResp, err := postgresflexalphaWait.GetUserByIdWaitHandler( + ctx, + r.client, + arg.projectId, + arg.instanceId, + arg.region, + model.UserId.ValueInt64(), + ).SetSleepBeforeWait( + 10 * time.Second, + ).SetTimeout( + 15 * time.Minute, + ).WaitWithContext(ctx) if err != nil { - core.LogAndAddError(ctx, &resp.Diagnostics, "Error updating user", fmt.Sprintf("Calling API: %v", err)) - return - } - - if !exists { core.LogAndAddError( - ctx, &resp.Diagnostics, "Error updating user", - fmt.Sprintf("User ID '%v' resource not found after update", stateModel.UserId.ValueInt64()), + ctx, + &resp.Diagnostics, + "read user", + fmt.Sprintf("Instance creation waiting: %v", err), ) return } + if waitResp.Id == nil || *waitResp.Id != model.UserId.ValueInt64() { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + "read user", + "Instance creation waiting: returned id is nil or wrong", + ) + return + } + arg.userId = *waitResp.Id + // Set state to fully populated data diags = resp.State.Set(ctx, stateModel) resp.Diagnostics.Append(diags...) @@ -470,19 +526,19 @@ func (r *userResource) Delete( ctx = core.LogResponse(ctx) - // Verify deletion - exists, err := r.getUserResource(ctx, &model, arg) - if err != nil { - core.LogAndAddError(ctx, &resp.Diagnostics, "Error deleting user", fmt.Sprintf("Calling API: %v", err)) - return - } - if exists { - core.LogAndAddError( - ctx, &resp.Diagnostics, "Error deleting user", - fmt.Sprintf("User ID '%v' resource still exists after deletion", model.UserId.ValueInt64()), - ) - return - } + // TODO: Verify deletion + //exists, err := r.getUserResource(ctx, &model, arg) + //if err != nil { + // core.LogAndAddError(ctx, &resp.Diagnostics, "Error deleting user", fmt.Sprintf("Calling API: %v", err)) + // return + //} + //if exists { + // core.LogAndAddError( + // ctx, &resp.Diagnostics, "Error deleting user", + // fmt.Sprintf("User ID '%v' resource still exists after deletion", model.UserId.ValueInt64()), + // ) + // return + //} resp.State.RemoveResource(ctx) @@ -513,34 +569,6 @@ func (r *userResource) IdentitySchema( } } -// getUserResource refreshes the resource state by calling the API and mapping the response to the model. -// Returns true if the resource state was successfully refreshed, false if the resource does not exist. -func (r *userResource) getUserResource(ctx context.Context, model *resourceModel, arg *clientArg) (bool, error) { - - if arg.userId > math.MaxInt32 { - return false, errors.New("error in type conversion: int value too large (userId)") - } - userId := int32(arg.userId) - - // API Call - userResp, err := r.client.GetUserRequest(ctx, arg.projectId, arg.region, arg.instanceId, userId).Execute() - - if err != nil { - var oapiErr *oapierror.GenericOpenAPIError - if errors.As(err, &oapiErr) && oapiErr.StatusCode == http.StatusNotFound { - return false, nil - } - - return false, fmt.Errorf("error fetching user resource: %w", err) - } - - if err := mapResourceFields(userResp, model, arg.region); err != nil { - return false, fmt.Errorf("error mapping user resource: %w", err) - } - - return true, nil -} - // clientArg holds the arguments for API calls. type clientArg struct { projectId string diff --git a/stackit/internal/services/sqlserverflexbeta/database/planModifiers.yaml b/stackit/internal/services/sqlserverflexbeta/database/planModifiers.yaml index d6209230..1d010ed7 100644 --- a/stackit/internal/services/sqlserverflexbeta/database/planModifiers.yaml +++ b/stackit/internal/services/sqlserverflexbeta/database/planModifiers.yaml @@ -31,7 +31,7 @@ fields: - name: 'owner' modifiers: - - 'RequiresReplace' + - 'UseStateForUnknown' - name: 'database_name' modifiers: @@ -43,6 +43,7 @@ fields: - name: 'compatibility' modifiers: + - 'UseStateForUnknown' - 'RequiresReplace' - name: 'compatibility_level' diff --git a/stackit/internal/services/sqlserverflexbeta/sqlserverflex_acc_test.go b/stackit/internal/services/sqlserverflexbeta/sqlserverflex_acc_test.go index e6c776dc..33f7cd29 100644 --- a/stackit/internal/services/sqlserverflexbeta/sqlserverflex_acc_test.go +++ b/stackit/internal/services/sqlserverflexbeta/sqlserverflex_acc_test.go @@ -120,6 +120,7 @@ func getExample() resData { func TestAccInstance(t *testing.T) { exData := getExample() + t.Logf(" ... working on instance %s", exData.TfName) updNameData := exData updNameData.Name = "name-updated" diff --git a/stackit/internal/services/sqlserverflexbeta/user/planModifiers.yaml b/stackit/internal/services/sqlserverflexbeta/user/planModifiers.yaml index fe4025ee..8ff346ab 100644 --- a/stackit/internal/services/sqlserverflexbeta/user/planModifiers.yaml +++ b/stackit/internal/services/sqlserverflexbeta/user/planModifiers.yaml @@ -3,16 +3,13 @@ fields: modifiers: - 'UseStateForUnknown' - - name: 'user_id' - modifiers: - - 'UseStateForUnknown' - - name: 'instance_id' validators: - validate.NoSeparator - validate.UUID modifiers: - 'UseStateForUnknown' + - 'RequiresReplace' - name: 'project_id' validators: @@ -28,6 +25,7 @@ fields: - name: 'user_id' modifiers: + - 'UseStateForUnknown' - 'RequiresReplace' - name: 'username' diff --git a/stackit/internal/wait/postgresflexalpha/wait.go b/stackit/internal/wait/postgresflexalpha/wait.go index c490b605..8c8b80b3 100644 --- a/stackit/internal/wait/postgresflexalpha/wait.go +++ b/stackit/internal/wait/postgresflexalpha/wait.go @@ -2,7 +2,10 @@ package postgresflexalpha import ( "context" + "errors" "fmt" + "math" + "net/http" "time" "github.com/hashicorp/terraform-plugin-log/tflog" @@ -40,10 +43,18 @@ type APIClientInstanceInterface interface { // APIClientUserInterface Interface needed for tests type APIClientUserInterface interface { - GetUserRequestExecute(ctx context.Context, projectId, region, instanceId string, userId int64) ( + GetUserRequestExecute(ctx context.Context, projectId, region, instanceId string, userId int32) ( *postgresflex.GetUserResponse, error, ) + + GetDatabaseRequestExecute( + ctx context.Context, + projectId string, + region string, + instanceId string, + databaseId int32, + ) (*postgresflex.GetDatabaseResponse, error) } // CreateInstanceWaitHandler will wait for instance creation @@ -202,3 +213,62 @@ func PartialUpdateInstanceWaitHandler( handler.SetTimeout(45 * time.Minute).SetSleepBeforeWait(30 * time.Second) return handler } + +// GetUserByIdWaitHandler will wait for instance creation +func GetUserByIdWaitHandler( + ctx context.Context, + a APIClientUserInterface, + projectId, instanceId, region string, + userId int64, +) *wait.AsyncActionHandler[postgresflex.GetUserResponse] { + handler := wait.New( + func() (waitFinished bool, response *postgresflex.GetUserResponse, err error) { + if userId > math.MaxInt32 { + return false, nil, fmt.Errorf("userId value is too big for int32") + } + userId32 := int32(userId) + s, err := a.GetUserRequestExecute(ctx, projectId, region, instanceId, userId32) + if err != nil { + var oapiErr *oapierror.GenericOpenAPIError + ok := errors.As(err, &oapiErr) + if !ok { + return false, nil, fmt.Errorf("could not convert error to oapierror.GenericOpenAPIError") + } + if oapiErr.StatusCode != http.StatusNotFound { + return false, nil, err + } + return false, nil, nil + } + return true, s, nil + }, + ) + return handler +} + +// GetDatabaseByIdWaitHandler will wait for instance creation +func GetDatabaseByIdWaitHandler( + ctx context.Context, + a APIClientUserInterface, + projectId, instanceId, region string, + databaseId int64, +) *wait.AsyncActionHandler[postgresflex.GetDatabaseResponse] { + handler := wait.New( + func() (waitFinished bool, response *postgresflex.GetDatabaseResponse, err error) { + dbId32 := int32(databaseId) + s, err := a.GetDatabaseRequestExecute(ctx, projectId, region, instanceId, dbId32) + if err != nil { + var oapiErr *oapierror.GenericOpenAPIError + ok := errors.As(err, &oapiErr) + if !ok { + return false, nil, fmt.Errorf("could not convert error to oapierror.GenericOpenAPIError") + } + if oapiErr.StatusCode != http.StatusNotFound { + return false, nil, err + } + return false, nil, nil + } + return true, s, nil + }, + ) + return handler +} From 843fc46f54a9bc5c7262c548d1b98d030cdad1fd Mon Sep 17 00:00:00 2001 From: "Marcel S. Henselin" Date: Fri, 13 Feb 2026 10:38:19 +0000 Subject: [PATCH 23/41] fix: tests (#57) ## Description relates to #1234 ## Checklist - [ ] Issue was linked above - [ ] Code format was applied: `make fmt` - [ ] Examples were added / adjusted (see `examples/` directory) - [x] Docs are up-to-date: `make generate-docs` (will be checked by CI) - [ ] Unit tests got implemented or updated - [ ] Acceptance tests got implemented or updated (see e.g. [here](https://github.com/stackitcloud/terraform-provider-stackit/blob/f5f99d170996b208672ae684b6da53420e369563/stackit/internal/services/dns/dns_acc_test.go)) - [x] Unit tests are passing: `make test` (will be checked by CI) - [x] No linter issues: `make lint` (will be checked by CI) Reviewed-on: https://tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pulls/57 Co-authored-by: Marcel S. Henselin Co-committed-by: Marcel S. Henselin --- .../postgresflexalpha/database/mapper_test.go | 6 +- .../postgresflex_acc_test.go | 67 +++++++++++----- .../postgresflexalpha/user/resource.go | 6 +- .../sqlserverflex_acc_test.go | 77 +++++++++++++++---- .../internal/wait/sqlserverflexbeta/wait.go | 2 +- .../wait/sqlserverflexbeta/wait_test.go | 53 ++++++++++++- 6 files changed, 172 insertions(+), 39 deletions(-) diff --git a/stackit/internal/services/postgresflexalpha/database/mapper_test.go b/stackit/internal/services/postgresflexalpha/database/mapper_test.go index 97212237..cd979b22 100644 --- a/stackit/internal/services/postgresflexalpha/database/mapper_test.go +++ b/stackit/internal/services/postgresflexalpha/database/mapper_test.go @@ -132,7 +132,7 @@ func TestMapFields(t *testing.T) { func TestMapResourceFields(t *testing.T) { type given struct { - source *postgresflexalpha.ListDatabase + source *postgresflexalpha.GetDatabaseResponse model *resourceModel } type expected struct { @@ -148,10 +148,10 @@ func TestMapResourceFields(t *testing.T) { { name: "should map fields correctly", given: given{ - source: &postgresflexalpha.ListDatabase{ + source: &postgresflexalpha.GetDatabaseResponse{ Id: utils.Ptr(int64(1)), Name: utils.Ptr("my-db"), - Owner: utils.Ptr("\"my-owner\""), + Owner: utils.Ptr("my-owner"), }, model: &resourceModel{}, }, diff --git a/stackit/internal/services/postgresflexalpha/postgresflex_acc_test.go b/stackit/internal/services/postgresflexalpha/postgresflex_acc_test.go index 7ec8c4f0..099698c2 100644 --- a/stackit/internal/services/postgresflexalpha/postgresflex_acc_test.go +++ b/stackit/internal/services/postgresflexalpha/postgresflex_acc_test.go @@ -21,6 +21,38 @@ import ( const pfx = "stackitprivatepreview_postgresflexalpha" +var createdInstances []string + +func init() { + sweeperName := fmt.Sprintf("%s_%s", pfx, "sweeper") + resource.AddTestSweepers(sweeperName, &resource.Sweeper{ + Name: sweeperName, + F: func(region string) error { + + //client, err := sharedClientForRegion(region) + //if err != nil { + // return fmt.Errorf("Error getting client: %s", err) + //} + //conn := client.(*ExampleClient) + // + //instances, err := conn.DescribeComputeInstances() + //if err != nil { + // return fmt.Errorf("Error getting instances: %s", err) + //} + //for _, instance := range instances { + // if strings.HasPrefix(instance.Name, "test-acc") { + // err := conn.DestroyInstance(instance.ID) + // + // if err != nil { + // log.Printf("Error destroying %s during sweep: %s", instance.Name, err) + // } + // } + //} + return nil + }, + }) +} + func TestInstanceResourceSchema(t *testing.T) { t.Parallel() @@ -58,13 +90,6 @@ func TestMain(m *testing.M) { os.Exit(code) } -//var ( -// validFlavor = "2.4" -// kekKeyRingId = "" -// kekKeyVersion = "" -// kekKeySA = "" -//) - func testAccPreCheck(t *testing.T) { if _, ok := os.LookupEnv("TF_ACC_PROJECT_ID"); !ok { t.Fatalf("could not find env var TF_ACC_PROJECT_ID") @@ -151,7 +176,6 @@ func getExample() resData { func TestAccInstance(t *testing.T) { exData := getExample() - t.Logf(" ... working on instance %s", exData.TfName) resName := fmt.Sprintf( "stackitprivatepreview_postgresflexalpha_instance.%s", @@ -164,8 +188,11 @@ func TestAccInstance(t *testing.T) { updSizeData := exData updSizeData.Size = 25 - resource.Test(t, resource.TestCase{ - PreCheck: func() { testAccPreCheck(t) }, + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { + testAccPreCheck(t) + t.Logf(" ... working on instance %s", exData.TfName) + }, ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories, Steps: []resource.TestStep{ // Create and verify @@ -215,7 +242,6 @@ func TestAccInstance(t *testing.T) { func TestAccInstanceWithUsers(t *testing.T) { data := getExample() - t.Logf(" ... working on instance %s", data.TfName) userName := "testUser" data.Users = []User{ @@ -226,8 +252,11 @@ func TestAccInstanceWithUsers(t *testing.T) { }, } - resource.Test(t, resource.TestCase{ - PreCheck: func() { testAccPreCheck(t) }, + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { + testAccPreCheck(t) + t.Logf(" ... working on instance %s", data.TfName) + }, ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories, Steps: []resource.TestStep{ // Create and verify @@ -249,7 +278,6 @@ func TestAccInstanceWithUsers(t *testing.T) { func TestAccInstanceWithDatabases(t *testing.T) { data := getExample() - t.Logf(" ... working on instance %s", data.TfName) dbName := "testDb" userName := "testUser" @@ -284,8 +312,11 @@ func TestAccInstanceWithDatabases(t *testing.T) { dbName, ) - resource.Test(t, resource.TestCase{ - PreCheck: func() { testAccPreCheck(t) }, + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { + testAccPreCheck(t) + t.Logf(" ... working on instance %s", data.TfName) + }, ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories, Steps: []resource.TestStep{ // Create and verify @@ -367,7 +398,7 @@ func TestAccInstanceWithDatabases(t *testing.T) { // ProjectID: project_id, // Name: "testRes", // } -// resource.Test(t, resource.TestCase{ +// resource.ParallelTest(t, resource.TestCase{ // ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories, // Steps: []resource.TestStep{ // { @@ -510,7 +541,7 @@ func TestAccInstanceWithDatabases(t *testing.T) { //} // //func TestAccPostgresFlexFlexResource(t *testing.T) { -// resource.Test( +// resource.ParallelTest( // t, resource.TestCase{ // ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories, // CheckDestroy: testAccCheckPostgresFlexDestroy, diff --git a/stackit/internal/services/postgresflexalpha/user/resource.go b/stackit/internal/services/postgresflexalpha/user/resource.go index c6cad445..1e50a0fe 100644 --- a/stackit/internal/services/postgresflexalpha/user/resource.go +++ b/stackit/internal/services/postgresflexalpha/user/resource.go @@ -452,7 +452,7 @@ func (r *userResource) Update( ctx, &resp.Diagnostics, "read user", - fmt.Sprintf("Instance creation waiting: %v", err), + fmt.Sprintf("user update waiting: %v", err), ) return } @@ -461,8 +461,8 @@ func (r *userResource) Update( core.LogAndAddError( ctx, &resp.Diagnostics, - "read user", - "Instance creation waiting: returned id is nil or wrong", + "update user", + "User creation waiting: returned id is nil or wrong", ) return } diff --git a/stackit/internal/services/sqlserverflexbeta/sqlserverflex_acc_test.go b/stackit/internal/services/sqlserverflexbeta/sqlserverflex_acc_test.go index 33f7cd29..4c63267d 100644 --- a/stackit/internal/services/sqlserverflexbeta/sqlserverflex_acc_test.go +++ b/stackit/internal/services/sqlserverflexbeta/sqlserverflex_acc_test.go @@ -4,16 +4,19 @@ import ( "context" _ "embed" "fmt" + "log" "os" "strconv" + "strings" "testing" "github.com/hashicorp/terraform-plugin-testing/helper/acctest" "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-plugin-testing/terraform" - sqlserverflexbeta "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/instance" - + "github.com/stackitcloud/stackit-sdk-go/core/config" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/internal/testutils" + sqlserverflexbeta2 "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta" + sqlserverflexbeta "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/instance" // The fwresource import alias is so there is no collision // with the more typical acceptance testing import: // "github.com/hashicorp/terraform-plugin-testing/helper/resource" @@ -22,6 +25,39 @@ import ( const providerPrefix = "stackitprivatepreview_sqlserverflexbeta" +func init() { + sweeperName := fmt.Sprintf("%s_%s", providerPrefix, "sweeper") + + resource.AddTestSweepers(sweeperName, &resource.Sweeper{ + Name: sweeperName, + F: func(region string) error { + ctx := context.Background() + apiClientConfigOptions := []config.ConfigurationOption{} + apiClient, err := sqlserverflexbeta2.NewAPIClient(apiClientConfigOptions...) + if err != nil { + log.Fatalln(err) + } + + instances, err := apiClient.ListInstancesRequest(ctx, testutils.ProjectId, testutils.Region). + Size(100). + Execute() + if err != nil { + log.Fatalln(err) + } + + for _, inst := range instances.GetInstances() { + if strings.HasPrefix(inst.GetName(), "tf-acc-") { + delErr := apiClient.DeleteInstanceRequestExecute(ctx, testutils.ProjectId, testutils.Region, inst.GetId()) + if delErr != nil { + log.Fatalln(delErr) + } + } + } + return nil + }, + }) +} + func TestInstanceResourceSchema(t *testing.T) { t.Parallel() @@ -120,7 +156,6 @@ func getExample() resData { func TestAccInstance(t *testing.T) { exData := getExample() - t.Logf(" ... working on instance %s", exData.TfName) updNameData := exData updNameData.Name = "name-updated" @@ -128,8 +163,11 @@ func TestAccInstance(t *testing.T) { updSizeData := exData updSizeData.Size = 25 - resource.Test(t, resource.TestCase{ - PreCheck: func() { testAccPreCheck(t) }, + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { + testAccPreCheck(t) + t.Logf(" ... working on instance %s", exData.TfName) + }, ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories, Steps: []resource.TestStep{ // Create and verify @@ -182,14 +220,21 @@ func TestAccInstance(t *testing.T) { func TestAccInstanceNoEncryption(t *testing.T) { data := getExample() - t.Logf(" ... working on instance %s", data.TfName) + dbName := "testDb" userName := "testUser" data.Users = []User{ { Name: userName, ProjectId: os.Getenv("TF_ACC_PROJECT_ID"), - Roles: []string{"##STACKIT_LoginManager##", "##STACKIT_DatabaseManager##"}, + Roles: []string{ + "##STACKIT_DatabaseManager##", + "##STACKIT_LoginManager##", + "##STACKIT_ProcessManager##", + "##STACKIT_ServerManager##", + "##STACKIT_SQLAgentManager##", + "##STACKIT_SQLAgentUser##", + }, }, } data.Databases = []Database{ @@ -200,8 +245,11 @@ func TestAccInstanceNoEncryption(t *testing.T) { }, } - resource.Test(t, resource.TestCase{ - PreCheck: func() { testAccPreCheck(t) }, + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { + testAccPreCheck(t) + t.Logf(" ... working on instance %s", data.TfName) + }, ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories, Steps: []resource.TestStep{ // Create and verify @@ -255,7 +303,7 @@ func TestAccInstanceNoEncryption(t *testing.T) { // check user values are correct resource.TestCheckResourceAttr(resName("user", userName), "username", userName), - resource.TestCheckResourceAttr(resName("user", userName), "roles.#", "2"), + resource.TestCheckResourceAttr(resName("user", userName), "roles.#", strconv.Itoa(len(data.Users[0].Roles))), // check database values are set resource.TestCheckResourceAttrSet(resName("database", dbName), "id"), @@ -275,7 +323,7 @@ func TestAccInstanceNoEncryption(t *testing.T) { func TestAccInstanceEncryption(t *testing.T) { data := getExample() - t.Logf(" ... working on instance %s", data.TfName) + dbName := "testDb" userName := "testUser" data.Users = []User{ @@ -299,8 +347,11 @@ func TestAccInstanceEncryption(t *testing.T) { data.KekKeyVersion = 1 data.KekServiceAccount = "henselinm-u2v3ex1@sa.stackit.cloud" - resource.Test(t, resource.TestCase{ - PreCheck: func() { testAccPreCheck(t) }, + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { + testAccPreCheck(t) + t.Logf(" ... working on instance %s", data.TfName) + }, ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories, Steps: []resource.TestStep{ // Create and verify diff --git a/stackit/internal/wait/sqlserverflexbeta/wait.go b/stackit/internal/wait/sqlserverflexbeta/wait.go index b6a5bfcd..9eb6657b 100644 --- a/stackit/internal/wait/sqlserverflexbeta/wait.go +++ b/stackit/internal/wait/sqlserverflexbeta/wait.go @@ -91,7 +91,7 @@ func CreateInstanceWaitHandler( } switch strings.ToLower(string(*s.Status)) { case strings.ToLower(InstanceStateSuccess): - if *s.Network.AccessScope == "SNA" { + if s.Network != nil && s.Network.AccessScope != nil && *s.Network.AccessScope == "SNA" { if s.Network.InstanceAddress == nil { tflog.Info(ctx, "Waiting for instance_address") return false, nil, nil diff --git a/stackit/internal/wait/sqlserverflexbeta/wait_test.go b/stackit/internal/wait/sqlserverflexbeta/wait_test.go index 35b66cf6..74579904 100644 --- a/stackit/internal/wait/sqlserverflexbeta/wait_test.go +++ b/stackit/internal/wait/sqlserverflexbeta/wait_test.go @@ -20,6 +20,57 @@ type apiClientInstanceMocked struct { instanceGetFails bool } +type ListUsersRequestRequest struct{} + +func (l ListUsersRequestRequest) Page(page int64) sqlserverflex.ApiListUsersRequestRequest { + return l +} + +func (l ListUsersRequestRequest) Size(size int64) sqlserverflex.ApiListUsersRequestRequest { + return l +} + +func (l ListUsersRequestRequest) Sort(sort sqlserverflex.UserSort) sqlserverflex.ApiListUsersRequestRequest { + return l +} + +func (l ListUsersRequestRequest) Execute() (*sqlserverflex.ListUserResponse, error) { + //TODO implement me + panic("implement me") +} + +func (a *apiClientInstanceMocked) ListUsersRequest( + ctx context.Context, + projectId string, + region string, + instanceId string, +) sqlserverflex.ApiListUsersRequestRequest { + return ListUsersRequestRequest{} +} + +func (a *apiClientInstanceMocked) ListRolesRequestExecute( + ctx context.Context, + projectId string, + region string, + instanceId string, +) (*sqlserverflex.ListRolesResponse, error) { + return &sqlserverflex.ListRolesResponse{ + Roles: &[]string{}, + }, nil +} + +func (a *apiClientInstanceMocked) ListUsersRequestExecute( + ctx context.Context, + projectId string, + region string, + instanceId string, +) (*sqlserverflex.ListUserResponse, error) { + return &sqlserverflex.ListUserResponse{ + Pagination: nil, + Users: nil, + }, nil +} + func (a *apiClientInstanceMocked) GetDatabaseRequestExecute( _ context.Context, projectId string, @@ -63,7 +114,7 @@ func (a *apiClientInstanceMocked) GetInstanceRequestExecute( }, nil } func TestCreateInstanceWaitHandler(t *testing.T) { - t.Skip("skipping - needs refactoring") + //t.Skip("skipping - needs refactoring") tests := []struct { desc string instanceGetFails bool From e01ae1a920a10cb395c6274fd860d473534e5601 Mon Sep 17 00:00:00 2001 From: "Marcel S. Henselin" Date: Fri, 13 Feb 2026 14:27:14 +0000 Subject: [PATCH 24/41] fix: fix lintings (#58) ## Description relates to #1234 ## Checklist - [ ] Issue was linked above - [ ] Code format was applied: `make fmt` - [ ] Examples were added / adjusted (see `examples/` directory) - [x] Docs are up-to-date: `make generate-docs` (will be checked by CI) - [ ] Unit tests got implemented or updated - [ ] Acceptance tests got implemented or updated (see e.g. [here](https://github.com/stackitcloud/terraform-provider-stackit/blob/f5f99d170996b208672ae684b6da53420e369563/stackit/internal/services/dns/dns_acc_test.go)) - [x] Unit tests are passing: `make test` (will be checked by CI) - [x] No linter issues: `make lint` (will be checked by CI) Reviewed-on: https://tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pulls/58 Co-authored-by: Marcel S. Henselin Co-committed-by: Marcel S. Henselin --- .github/workflows/ci.yaml | 104 +++++++++++--- Makefile | 4 +- cmd/cmd/build/build.go | 4 +- cmd/cmd/buildCmd.go | 1 + cmd/cmd/getFieldsCmd.go | 4 +- cmd/cmd/publish/architecture.go | 2 +- cmd/cmd/publishCmd.go | 1 + cmd/main.go | 1 + docs/resources/postgresflexalpha_user.md | 2 +- docs/resources/sqlserverflexalpha_user.md | 2 +- golang-ci.yaml | 30 +++- internal/testutils/functions.go | 2 +- internal/testutils/testutils.go | 4 +- main.go | 1 + stackit/internal/conversion/conversion.go | 1 + .../internal/conversion/conversion_test.go | 1 + stackit/internal/features/beta.go | 1 + stackit/internal/features/beta_test.go | 1 + stackit/internal/features/experiments.go | 1 + stackit/internal/features/experiments_test.go | 1 + .../postgresflexalpha/database/datasource.go | 1 + .../database/functions_test.go | 1 + .../postgresflexalpha/database/mapper.go | 1 + .../postgresflexalpha/database/mapper_test.go | 1 + .../postgresflexalpha/database/resource.go | 22 ++- .../postgresflexalpha/flavor/datasource.go | 2 + .../flavor/functions_test.go | 1 + .../postgresflexalpha/flavors/datasource.go | 1 + .../postgresflexalpha/instance/datasource.go | 2 + .../postgresflexalpha/instance/functions.go | 1 + .../instance/functions_test.go | 1 + .../postgresflexalpha/instance/resource.go | 9 +- .../postgresflex_acc_test.go | 115 ++++++++------- .../postgresflexalpha/user/datasource.go | 4 +- .../services/postgresflexalpha/user/mapper.go | 1 + .../postgresflexalpha/user/mapper_test.go | 1 + .../postgresflexalpha/user/resource.go | 12 +- .../services/postgresflexalpha/utils/util.go | 1 + .../postgresflexalpha/utils/util_test.go | 1 + .../sqlserverflexalpha/database/datasource.go | 1 + .../sqlserverflexalpha/database/mapper.go | 1 + .../database/mapper_test.go | 1 + .../sqlserverflexalpha/database/resource.go | 1 + .../flavor/functions_test.go | 1 + .../sqlserverflexalpha/flavors/datasource.go | 1 + .../sqlserverflexalpha/instance/datasource.go | 10 +- .../sqlserverflexalpha/instance/functions.go | 3 +- .../sqlserverflexalpha/instance/resource.go | 2 + .../instance/resource_test.go | 2 +- .../sqlserverflex_acc_test.go | 3 +- .../sqlserverflexalpha/user/datasource.go | 1 + .../sqlserverflexalpha/user/mapper.go | 1 + .../sqlserverflexalpha/user/mapper_test.go | 1 + .../sqlserverflexalpha/user/resource.go | 4 +- .../services/sqlserverflexalpha/utils/util.go | 1 + .../sqlserverflexalpha/utils/util_test.go | 1 + .../sqlserverflexbeta/database/datasource.go | 1 + .../sqlserverflexbeta/database/mapper.go | 1 + .../sqlserverflexbeta/database/mapper_test.go | 23 +-- .../sqlserverflexbeta/database/resource.go | 3 +- .../sqlserverflexbeta/flavor/datasource.go | 2 +- .../flavor/functions_test.go | 1 + .../sqlserverflexbeta/flavors/datasource.go | 3 +- .../sqlserverflexbeta/instance/datasource.go | 1 + .../sqlserverflexbeta/instance/functions.go | 3 +- .../sqlserverflexbeta/instance/resource.go | 1 + .../sqlserverflex_acc_test.go | 76 +++++----- .../sqlserverflexbeta/user/datasource.go | 1 + .../services/sqlserverflexbeta/user/mapper.go | 1 + .../sqlserverflexbeta/user/mapper_test.go | 1 + .../sqlserverflexbeta/user/resource.go | 3 +- .../services/sqlserverflexbeta/utils/util.go | 1 + .../sqlserverflexbeta/utils/util_test.go | 1 + stackit/internal/utils/attributes.go | 1 + stackit/internal/utils/regions.go | 1 + stackit/internal/utils/utils.go | 1 + stackit/internal/validate/validate.go | 1 + .../internal/wait/postgresflexalpha/wait.go | 24 +++- .../wait/postgresflexalpha/wait_test.go | 1 + .../internal/wait/sqlserverflexalpha/wait.go | 2 + .../wait/sqlserverflexalpha/wait_test.go | 1 + .../internal/wait/sqlserverflexbeta/wait.go | 5 +- .../wait/sqlserverflexbeta/wait_test.go | 133 +++++++++--------- stackit/provider.go | 1 + stackit/provider_acc_test.go | 1 + 85 files changed, 430 insertions(+), 246 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 693eec61..f229dcb4 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -104,11 +104,67 @@ jobs: --gpgPubKeyFile=public_key.pem \ --version=${VERSION} + testing: + name: CI run tests + runs-on: ubuntu-latest + needs: config + env: + TF_ACC_PROJECT_ID: ${{ vars.TF_ACC_PROJECT_ID }} + TF_ACC_REGION: ${{ vars.TF_ACC_REGION }} + TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_EMAIL: ${{ vars.TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_EMAIL }} + TF_ACC_SERVICE_ACCOUNT_FILE: "~/service_account.json" + steps: + - name: Checkout + uses: actions/checkout@v6 + - name: Build + uses: ./.github/actions/build + with: + go-version: ${{ env.GO_VERSION }} + + - name: Setup Terraform + uses: hashicorp/setup-terraform@v2 + with: + terraform_wrapper: false + + - name: Create service account json file + if: ${{ github.event_name == 'pull_request' }} + run: | + echo "${{ secrets.TF_ACC_SERVICE_ACCOUNT_JSON }}" >~/service_account.json + + - name: Run go mod tidy + if: ${{ github.event_name == 'pull_request' }} + run: go mod tidy + + - name: Testing + run: make test + + - name: Acceptance Testing + env: + TF_ACC: "1" + if: ${{ github.event_name == 'pull_request' }} + run: make test-acceptance-tf + + - name: Check coverage threshold + shell: bash + run: | + make coverage + COVERAGE=$(go tool cover -func=coverage.out | grep total | awk '{print $3}' | sed 's/%//') + echo "Coverage: $COVERAGE%" + if (( $(echo "$COVERAGE < 80" | bc -l) )); then + echo "Coverage is below 80%" + # exit 1 + fi + + - name: Archive code coverage results + uses: actions/upload-artifact@v4 + with: + name: ${{ env.CODE_COVERAGE_ARTIFACT_NAME }} + path: "stackit/${{ env.CODE_COVERAGE_FILE_NAME }}" main: if: ${{ github.event_name != 'schedule' }} - name: CI + name: CI run build and linting runs-on: ubuntu-latest needs: config steps: @@ -137,31 +193,37 @@ jobs: - name: golangci-lint uses: golangci/golangci-lint-action@v9 with: - version: v2.7 + version: v2.9 args: --config=golang-ci.yaml --allow-parallel-runners --timeout=5m + continue-on-error: true - - name: Lint + - name: Linting run: make lint + continue-on-error: true - - name: Test - run: make test +# - name: Testing +# run: make test +# +# - name: Acceptance Testing +# if: ${{ github.event_name == 'pull_request' }} +# run: make test-acceptance-tf +# +# - name: Check coverage threshold +# shell: bash +# run: | +# make coverage +# COVERAGE=$(go tool cover -func=coverage.out | grep total | awk '{print $3}' | sed 's/%//') +# echo "Coverage: $COVERAGE%" +# if (( $(echo "$COVERAGE < 80" | bc -l) )); then +# echo "Coverage is below 80%" +# # exit 1 +# fi - - name: Check coverage threshold - shell: bash - run: | - make coverage - COVERAGE=$(go tool cover -func=coverage.out | grep total | awk '{print $3}' | sed 's/%//') - echo "Coverage: $COVERAGE%" - if (( $(echo "$COVERAGE < 80" | bc -l) )); then - echo "Coverage is below 80%" - # exit 1 - fi - - - name: Archive code coverage results - uses: actions/upload-artifact@v4 - with: - name: ${{ env.CODE_COVERAGE_ARTIFACT_NAME }} - path: "stackit/${{ env.CODE_COVERAGE_FILE_NAME }}" +# - name: Archive code coverage results +# uses: actions/upload-artifact@v4 +# with: +# name: ${{ env.CODE_COVERAGE_ARTIFACT_NAME }} +# path: "stackit/${{ env.CODE_COVERAGE_FILE_NAME }}" config: if: ${{ github.event_name != 'schedule' }} diff --git a/Makefile b/Makefile index f678f099..86a2a0f2 100644 --- a/Makefile +++ b/Makefile @@ -37,12 +37,12 @@ fmt: .PHONY: test coverage test: @echo "Running tests for the terraform provider" - @cd $(ROOT_DIR)/stackit && go test ./... -count=1 -coverprofile=../coverage.out && cd $(ROOT_DIR) + @cd $(ROOT_DIR)/stackit && go test -timeout 0 ./... -count=1 -coverprofile=../coverage.out && cd $(ROOT_DIR) # Test coverage coverage: @echo ">> Creating test coverage report for the terraform provider" - @cd $(ROOT_DIR)/stackit && (go test ./... -count=1 -coverprofile=../coverage.out || true) && cd $(ROOT_DIR) + @cd $(ROOT_DIR)/stackit && (go test -timeout 0 ./... -count=1 -coverprofile=../coverage.out || true) && cd $(ROOT_DIR) @cd $(ROOT_DIR)/stackit && go tool cover -html=../coverage.out -o ../coverage.html && cd $(ROOT_DIR) test-acceptance-tf: diff --git a/cmd/cmd/build/build.go b/cmd/cmd/build/build.go index c80401fa..d381de63 100644 --- a/cmd/cmd/build/build.go +++ b/cmd/cmd/build/build.go @@ -693,7 +693,7 @@ func handleTfTagForDatasourceFile(filePath, service, resource string) error { if err != nil { return err } - if _, err := io.WriteString(tmp, resLine+"\n"); err != nil { + if _, err := tmp.WriteString(resLine + "\n"); err != nil { return err } } @@ -941,7 +941,7 @@ func getTokens(fileName string) ([]string, error) { result = append(result, tts.Names[0].String()) - //fld, fldOk := tts.Type.(*ast.Ident) + // fld, fldOk := tts.Type.(*ast.Ident) //if fldOk { // fmt.Printf("type: %+v\n", fld) //} diff --git a/cmd/cmd/buildCmd.go b/cmd/cmd/buildCmd.go index 8902132e..b9f4c835 100644 --- a/cmd/cmd/buildCmd.go +++ b/cmd/cmd/buildCmd.go @@ -2,6 +2,7 @@ package cmd import ( "github.com/spf13/cobra" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/cmd/cmd/build" ) diff --git a/cmd/cmd/getFieldsCmd.go b/cmd/cmd/getFieldsCmd.go index f24db87d..48cb379a 100644 --- a/cmd/cmd/getFieldsCmd.go +++ b/cmd/cmd/getFieldsCmd.go @@ -75,7 +75,7 @@ var getFieldsCmd = &cobra.Command{ filePath = p //// Enum check - //switch format { + // switch format { //case "json", "yaml": //default: // return fmt.Errorf("invalid --format: %s (want json|yaml)", format) @@ -121,7 +121,7 @@ func getTokens(fileName string) ([]string, error) { result = append(result, tts.Names[0].String()) - //fld, fldOk := tts.Type.(*ast.Ident) + // fld, fldOk := tts.Type.(*ast.Ident) //if fldOk { // fmt.Printf("type: %+v\n", fld) //} diff --git a/cmd/cmd/publish/architecture.go b/cmd/cmd/publish/architecture.go index f77188c3..5fffa585 100644 --- a/cmd/cmd/publish/architecture.go +++ b/cmd/cmd/publish/architecture.go @@ -142,7 +142,7 @@ func (p *Provider) CreateArchitectureFiles() error { // ] // } //} - //`, target, arch, fileName, downloadUrl, shasumsUrl, shasumsSigUrl, shasum, gpgFingerprint, gpgAsciiPub)) + // `, target, arch, fileName, downloadUrl, shasumsUrl, shasumsSigUrl, shasum, gpgFingerprint, gpgAsciiPub)) log.Printf(" - Arch file: %s", archFileName) diff --git a/cmd/cmd/publishCmd.go b/cmd/cmd/publishCmd.go index 22e3efb9..4849ba4b 100644 --- a/cmd/cmd/publishCmd.go +++ b/cmd/cmd/publishCmd.go @@ -10,6 +10,7 @@ import ( "path/filepath" "github.com/spf13/cobra" + publish2 "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/cmd/cmd/publish" ) diff --git a/cmd/main.go b/cmd/main.go index 1b80b034..016d9a5b 100644 --- a/cmd/main.go +++ b/cmd/main.go @@ -7,6 +7,7 @@ import ( "github.com/MatusOllah/slogcolor" cc "github.com/ivanpirog/coloredcobra" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/cmd/cmd" ) diff --git a/docs/resources/postgresflexalpha_user.md b/docs/resources/postgresflexalpha_user.md index 2ebffe71..d68b920c 100644 --- a/docs/resources/postgresflexalpha_user.md +++ b/docs/resources/postgresflexalpha_user.md @@ -16,7 +16,7 @@ description: |- resource "stackitprivatepreview_postgresflexalpha_user" "example" { project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" - username = "username" + name = "username" roles = ["role"] } diff --git a/docs/resources/sqlserverflexalpha_user.md b/docs/resources/sqlserverflexalpha_user.md index 8c5f8702..85d5350e 100644 --- a/docs/resources/sqlserverflexalpha_user.md +++ b/docs/resources/sqlserverflexalpha_user.md @@ -32,7 +32,7 @@ import { ### Required -- `roles` (List of String) A list containing the user roles for the instance. +- `roles` (List of String) A list containing the user roles for the instance. A list with the valid user roles can be retrieved using the List Roles endpoint. - `username` (String) The name of the user. ### Optional diff --git a/golang-ci.yaml b/golang-ci.yaml index b3f00eb7..42d943f2 100644 --- a/golang-ci.yaml +++ b/golang-ci.yaml @@ -24,6 +24,11 @@ linters: rules: main: list-mode: lax + allow: + - tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview + - github.com/hashicorp/terraform-plugin-framework + - github.com/hashicorp/terraform-plugin-log + - github.com/stackitcloud/stackit-sdk-go deny: - pkg: github.com/stretchr/testify desc: Do not use a testing framework @@ -64,12 +69,21 @@ linters: - name: early-return exclusions: generated: lax + warn-unused: true + # Excluding configuration per-path, per-linter, per-text and per-source. + rules: + # Exclude some linters from running on tests files. + - path: _test\.go + linters: + - gochecknoinits paths: - - third_party$ - - builtin$ - - examples$ + - third_party/ + - builtin/ + - examples/ - tools/copy.go - tools/main.go + - pkg_gen/ + - cmd/ formatters: enable: - gofmt @@ -77,10 +91,12 @@ formatters: settings: goimports: local-prefixes: - - github.com/freiheit-com/nmww + - tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview exclusions: generated: lax paths: - - third_party$ - - builtin$ - - examples$ + - third_party/ + - builtin/ + - examples/ + - pkg_gen/ + - cmd/ diff --git a/internal/testutils/functions.go b/internal/testutils/functions.go index 0006751a..3e748941 100644 --- a/internal/testutils/functions.go +++ b/internal/testutils/functions.go @@ -88,7 +88,7 @@ func CleanupTemporaryHome(tempHomePath string, t *testing.T) { } func ucFirst(s string) string { - if len(s) == 0 { + if s == "" { return "" } return strings.ToUpper(s[:1]) + s[1:] diff --git a/internal/testutils/testutils.go b/internal/testutils/testutils.go index c35c332c..933dab0e 100644 --- a/internal/testutils/testutils.go +++ b/internal/testutils/testutils.go @@ -113,7 +113,7 @@ func GetTestProjectServiceAccountJson(path string) string { return token } -//func GetTestProjectServiceAccountToken(path string) string { +// func GetTestProjectServiceAccountToken(path string) string { // var err error // token, tokenSet := os.LookupEnv("TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_TOKEN") // if !tokenSet || token == "" { @@ -125,7 +125,7 @@ func GetTestProjectServiceAccountJson(path string) string { // return token //} // -//func readTestTokenFromCredentialsFile(path string) (string, error) { +// func readTestTokenFromCredentialsFile(path string) (string, error) { // if path == "" { // customPath, customPathSet := os.LookupEnv("STACKIT_CREDENTIALS_PATH") // if !customPathSet || customPath == "" { diff --git a/main.go b/main.go index 6d7793da..ab603dd6 100644 --- a/main.go +++ b/main.go @@ -6,6 +6,7 @@ import ( "log" "github.com/hashicorp/terraform-plugin-framework/providerserver" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit" ) diff --git a/stackit/internal/conversion/conversion.go b/stackit/internal/conversion/conversion.go index cd4c3bfa..48871213 100644 --- a/stackit/internal/conversion/conversion.go +++ b/stackit/internal/conversion/conversion.go @@ -11,6 +11,7 @@ import ( "github.com/hashicorp/terraform-plugin-framework/attr" "github.com/hashicorp/terraform-plugin-framework/types" "github.com/hashicorp/terraform-plugin-framework/types/basetypes" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" ) diff --git a/stackit/internal/conversion/conversion_test.go b/stackit/internal/conversion/conversion_test.go index 5e6c2445..ac5f4535 100644 --- a/stackit/internal/conversion/conversion_test.go +++ b/stackit/internal/conversion/conversion_test.go @@ -8,6 +8,7 @@ import ( "testing" "github.com/hashicorp/terraform-plugin-framework/diag" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" "github.com/google/go-cmp/cmp" diff --git a/stackit/internal/features/beta.go b/stackit/internal/features/beta.go index 781ac8c0..ab74e554 100644 --- a/stackit/internal/features/beta.go +++ b/stackit/internal/features/beta.go @@ -9,6 +9,7 @@ import ( "strings" "github.com/hashicorp/terraform-plugin-framework/diag" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" ) diff --git a/stackit/internal/features/beta_test.go b/stackit/internal/features/beta_test.go index 83fb2f99..366158f8 100644 --- a/stackit/internal/features/beta_test.go +++ b/stackit/internal/features/beta_test.go @@ -7,6 +7,7 @@ import ( "testing" "github.com/hashicorp/terraform-plugin-framework/diag" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" ) diff --git a/stackit/internal/features/experiments.go b/stackit/internal/features/experiments.go index b68399ed..2230a7b5 100644 --- a/stackit/internal/features/experiments.go +++ b/stackit/internal/features/experiments.go @@ -10,6 +10,7 @@ import ( "github.com/hashicorp/terraform-plugin-framework/diag" "github.com/hashicorp/terraform-plugin-log/tflog" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" ) diff --git a/stackit/internal/features/experiments_test.go b/stackit/internal/features/experiments_test.go index 06423a4f..771a8444 100644 --- a/stackit/internal/features/experiments_test.go +++ b/stackit/internal/features/experiments_test.go @@ -7,6 +7,7 @@ import ( "testing" "github.com/hashicorp/terraform-plugin-framework/diag" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" ) diff --git a/stackit/internal/services/postgresflexalpha/database/datasource.go b/stackit/internal/services/postgresflexalpha/database/datasource.go index 4a89be17..f9e3a447 100644 --- a/stackit/internal/services/postgresflexalpha/database/datasource.go +++ b/stackit/internal/services/postgresflexalpha/database/datasource.go @@ -10,6 +10,7 @@ import ( "github.com/hashicorp/terraform-plugin-framework/diag" "github.com/hashicorp/terraform-plugin-framework/types" "github.com/hashicorp/terraform-plugin-log/tflog" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" postgresflexalpha2 "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/database/datasources_gen" diff --git a/stackit/internal/services/postgresflexalpha/database/functions_test.go b/stackit/internal/services/postgresflexalpha/database/functions_test.go index 9f0b47fd..4c921b14 100644 --- a/stackit/internal/services/postgresflexalpha/database/functions_test.go +++ b/stackit/internal/services/postgresflexalpha/database/functions_test.go @@ -6,6 +6,7 @@ import ( "github.com/google/go-cmp/cmp" "github.com/stackitcloud/stackit-sdk-go/core/utils" + postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha" ) diff --git a/stackit/internal/services/postgresflexalpha/database/mapper.go b/stackit/internal/services/postgresflexalpha/database/mapper.go index 9e38fe3f..89140267 100644 --- a/stackit/internal/services/postgresflexalpha/database/mapper.go +++ b/stackit/internal/services/postgresflexalpha/database/mapper.go @@ -5,6 +5,7 @@ import ( "strconv" "github.com/hashicorp/terraform-plugin-framework/types" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" ) diff --git a/stackit/internal/services/postgresflexalpha/database/mapper_test.go b/stackit/internal/services/postgresflexalpha/database/mapper_test.go index cd979b22..16fd0ce6 100644 --- a/stackit/internal/services/postgresflexalpha/database/mapper_test.go +++ b/stackit/internal/services/postgresflexalpha/database/mapper_test.go @@ -6,6 +6,7 @@ import ( "github.com/google/go-cmp/cmp" "github.com/hashicorp/terraform-plugin-framework/types" "github.com/stackitcloud/stackit-sdk-go/core/utils" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha" datasource "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/database/datasources_gen" ) diff --git a/stackit/internal/services/postgresflexalpha/database/resource.go b/stackit/internal/services/postgresflexalpha/database/resource.go index 530bb98c..c42744cd 100644 --- a/stackit/internal/services/postgresflexalpha/database/resource.go +++ b/stackit/internal/services/postgresflexalpha/database/resource.go @@ -3,7 +3,6 @@ package postgresflexalpha import ( "context" _ "embed" - "errors" "fmt" "math" "strconv" @@ -15,6 +14,7 @@ import ( "github.com/hashicorp/terraform-plugin-framework/resource/identityschema" "github.com/hashicorp/terraform-plugin-framework/types" "github.com/hashicorp/terraform-plugin-log/tflog" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" @@ -32,9 +32,6 @@ var ( _ resource.ResourceWithModifyPlan = &databaseResource{} _ resource.ResourceWithIdentity = &databaseResource{} - // Define errors - errDatabaseNotFound = errors.New("database not found") - // Error message constants extractErrorSummary = "extracting failed" extractErrorMessage = "Extracting identity data: %v" @@ -171,6 +168,7 @@ func (r *databaseResource) Create( req resource.CreateRequest, resp *resource.CreateResponse, ) { // nolint:gocritic // function signature required by Terraform + const funcErrorSummary = "[database CREATE] error" var model resourceModel diags := req.Plan.Get(ctx, &model) resp.Diagnostics.Append(diags...) @@ -194,7 +192,7 @@ func (r *databaseResource) Create( core.LogAndAddError( ctx, &resp.Diagnostics, - "Error creating database", + funcErrorSummary, fmt.Sprintf("Creating API payload: %v", err), ) return @@ -207,7 +205,7 @@ func (r *databaseResource) Create( instanceId, ).CreateDatabaseRequestPayload(*payload).Execute() if err != nil { - core.LogAndAddError(ctx, &resp.Diagnostics, "Error creating database", fmt.Sprintf("Calling API: %v", err)) + core.LogAndAddError(ctx, &resp.Diagnostics, funcErrorSummary, fmt.Sprintf("Calling API: %v", err)) return } @@ -215,7 +213,7 @@ func (r *databaseResource) Create( core.LogAndAddError( ctx, &resp.Diagnostics, - "Error creating database", + funcErrorSummary, "API didn't return database Id. A database might have been created", ) return @@ -244,7 +242,7 @@ func (r *databaseResource) Create( core.LogAndAddError( ctx, &resp.Diagnostics, - "Error creating database", + funcErrorSummary, fmt.Sprintf("Getting database details after creation: %v", err), ) return @@ -256,8 +254,8 @@ func (r *databaseResource) Create( core.LogAndAddError( ctx, &resp.Diagnostics, - "Error creating database", - fmt.Sprintf("Processing API payload: %v", err), + funcErrorSummary, + fmt.Sprintf("map resource fields: %v", err), ) return } @@ -392,7 +390,7 @@ func (r *databaseResource) Update( core.LogAndAddError(ctx, &resp.Diagnostics, "Error in type conversion", "int value too large (databaseId)") return } - databaseId := int32(databaseId64) + databaseId := int32(databaseId64) // nolint:gosec // check is performed above ctx = tflog.SetField(ctx, "project_id", projectId) ctx = tflog.SetField(ctx, "instance_id", instanceId) @@ -518,7 +516,7 @@ func (r *databaseResource) Delete( core.LogAndAddError(ctx, &resp.Diagnostics, "Error in type conversion", "int value too large (databaseId)") return } - databaseId := int32(databaseId64) + databaseId := int32(databaseId64) // nolint:gosec // check is performed above ctx = tflog.SetField(ctx, "project_id", projectId) ctx = tflog.SetField(ctx, "instance_id", instanceId) ctx = tflog.SetField(ctx, "region", region) diff --git a/stackit/internal/services/postgresflexalpha/flavor/datasource.go b/stackit/internal/services/postgresflexalpha/flavor/datasource.go index dc660dd3..52c6b779 100644 --- a/stackit/internal/services/postgresflexalpha/flavor/datasource.go +++ b/stackit/internal/services/postgresflexalpha/flavor/datasource.go @@ -8,6 +8,7 @@ import ( "github.com/hashicorp/terraform-plugin-framework/datasource/schema" "github.com/hashicorp/terraform-plugin-framework/types" "github.com/hashicorp/terraform-plugin-framework/types/basetypes" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" postgresflexalphaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/flavors/datasources_gen" @@ -16,6 +17,7 @@ import ( "github.com/hashicorp/terraform-plugin-framework/datasource" "github.com/hashicorp/terraform-plugin-log/tflog" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" ) diff --git a/stackit/internal/services/postgresflexalpha/flavor/functions_test.go b/stackit/internal/services/postgresflexalpha/flavor/functions_test.go index 90590716..bb7180c1 100644 --- a/stackit/internal/services/postgresflexalpha/flavor/functions_test.go +++ b/stackit/internal/services/postgresflexalpha/flavor/functions_test.go @@ -5,6 +5,7 @@ import ( "testing" "github.com/stackitcloud/stackit-sdk-go/core/utils" + postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha" ) diff --git a/stackit/internal/services/postgresflexalpha/flavors/datasource.go b/stackit/internal/services/postgresflexalpha/flavors/datasource.go index 44483018..df8fddac 100644 --- a/stackit/internal/services/postgresflexalpha/flavors/datasource.go +++ b/stackit/internal/services/postgresflexalpha/flavors/datasource.go @@ -5,6 +5,7 @@ import ( "github.com/hashicorp/terraform-plugin-framework/datasource" "github.com/hashicorp/terraform-plugin-log/tflog" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" diff --git a/stackit/internal/services/postgresflexalpha/instance/datasource.go b/stackit/internal/services/postgresflexalpha/instance/datasource.go index 95f7904b..6a4296a4 100644 --- a/stackit/internal/services/postgresflexalpha/instance/datasource.go +++ b/stackit/internal/services/postgresflexalpha/instance/datasource.go @@ -6,6 +6,7 @@ import ( "net/http" "github.com/hashicorp/terraform-plugin-framework/types" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" postgresflexalpha2 "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/instance/datasources_gen" @@ -13,6 +14,7 @@ import ( "github.com/hashicorp/terraform-plugin-framework/datasource" "github.com/hashicorp/terraform-plugin-log/tflog" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" ) diff --git a/stackit/internal/services/postgresflexalpha/instance/functions.go b/stackit/internal/services/postgresflexalpha/instance/functions.go index 93fb544d..eafbb3b5 100644 --- a/stackit/internal/services/postgresflexalpha/instance/functions.go +++ b/stackit/internal/services/postgresflexalpha/instance/functions.go @@ -8,6 +8,7 @@ import ( "github.com/hashicorp/terraform-plugin-framework/types" "github.com/hashicorp/terraform-plugin-framework/types/basetypes" "github.com/hashicorp/terraform-plugin-log/tflog" + postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha" postgresflexalphadatasource "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/instance/datasources_gen" postgresflexalpharesource "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/instance/resources_gen" diff --git a/stackit/internal/services/postgresflexalpha/instance/functions_test.go b/stackit/internal/services/postgresflexalpha/instance/functions_test.go index 19784ad8..ae2e55e3 100644 --- a/stackit/internal/services/postgresflexalpha/instance/functions_test.go +++ b/stackit/internal/services/postgresflexalpha/instance/functions_test.go @@ -2,6 +2,7 @@ package postgresflexalpha import ( "github.com/stackitcloud/stackit-sdk-go/core/utils" + postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha" ) diff --git a/stackit/internal/services/postgresflexalpha/instance/resource.go b/stackit/internal/services/postgresflexalpha/instance/resource.go index 3d603df0..0b354c64 100644 --- a/stackit/internal/services/postgresflexalpha/instance/resource.go +++ b/stackit/internal/services/postgresflexalpha/instance/resource.go @@ -14,6 +14,7 @@ import ( "github.com/hashicorp/terraform-plugin-framework/types" "github.com/hashicorp/terraform-plugin-log/tflog" "github.com/stackitcloud/stackit-sdk-go/core/oapierror" + postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" @@ -433,18 +434,18 @@ func (r *instanceResource) Update( return } - //if model.InstanceId.IsNull() || model.InstanceId.IsUnknown() { + // if model.InstanceId.IsNull() || model.InstanceId.IsUnknown() { // core.LogAndAddError(ctx, &resp.Diagnostics, "Error updating instance", "instanceId is null or unknown") // return //} // - //if model.ProjectId.IsNull() || model.ProjectId.IsUnknown() { + // if model.ProjectId.IsNull() || model.ProjectId.IsUnknown() { // core.LogAndAddError(ctx, &resp.Diagnostics, "Error updating instance", "projectId is null or unknown") // return //} - //projectId := model.ProjectId.ValueString() - //instanceId := model.InstanceId.ValueString() + // projectId := model.ProjectId.ValueString() + // instanceId := model.InstanceId.ValueString() projectId := identityData.ProjectID.ValueString() instanceId := identityData.InstanceID.ValueString() region := model.Region.ValueString() diff --git a/stackit/internal/services/postgresflexalpha/postgresflex_acc_test.go b/stackit/internal/services/postgresflexalpha/postgresflex_acc_test.go index 099698c2..7e71a32f 100644 --- a/stackit/internal/services/postgresflexalpha/postgresflex_acc_test.go +++ b/stackit/internal/services/postgresflexalpha/postgresflex_acc_test.go @@ -4,12 +4,17 @@ import ( "context" _ "embed" "fmt" + "log" "os" "strconv" + "strings" "testing" "github.com/hashicorp/terraform-plugin-testing/helper/acctest" "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/stackitcloud/stackit-sdk-go/core/config" + + postgresflexalpha2 "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha" postgresflexalpha "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/instance" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/internal/testutils" @@ -21,33 +26,40 @@ import ( const pfx = "stackitprivatepreview_postgresflexalpha" -var createdInstances []string +var testInstances []string func init() { sweeperName := fmt.Sprintf("%s_%s", pfx, "sweeper") resource.AddTestSweepers(sweeperName, &resource.Sweeper{ Name: sweeperName, F: func(region string) error { + ctx := context.Background() + apiClientConfigOptions := []config.ConfigurationOption{} + apiClient, err := postgresflexalpha2.NewAPIClient(apiClientConfigOptions...) + if err != nil { + log.Fatalln(err) + } - //client, err := sharedClientForRegion(region) - //if err != nil { - // return fmt.Errorf("Error getting client: %s", err) - //} - //conn := client.(*ExampleClient) - // - //instances, err := conn.DescribeComputeInstances() - //if err != nil { - // return fmt.Errorf("Error getting instances: %s", err) - //} - //for _, instance := range instances { - // if strings.HasPrefix(instance.Name, "test-acc") { - // err := conn.DestroyInstance(instance.ID) - // - // if err != nil { - // log.Printf("Error destroying %s during sweep: %s", instance.Name, err) - // } - // } - //} + instances, err := apiClient.ListInstancesRequest(ctx, testutils.ProjectId, testutils.Region). + Size(100). + Execute() + if err != nil { + log.Fatalln(err) + } + + for _, inst := range instances.GetInstances() { + if strings.HasPrefix(inst.GetName(), "tf-acc-") { + for _, item := range testInstances { + if inst.GetName() == item { + delErr := apiClient.DeleteInstanceRequestExecute(ctx, testutils.ProjectId, testutils.Region, inst.GetId()) + if delErr != nil { + // TODO: maybe just warn? + log.Fatalln(delErr) + } + } + } + } + } return nil }, }) @@ -96,7 +108,7 @@ func testAccPreCheck(t *testing.T) { } } -//func TestAccResourceExample_parallel(t *testing.T) { +// func TestAccResourceExample_parallel(t *testing.T) { // t.Parallel() // // exData := resData{ @@ -177,11 +189,6 @@ func getExample() resData { func TestAccInstance(t *testing.T) { exData := getExample() - resName := fmt.Sprintf( - "stackitprivatepreview_postgresflexalpha_instance.%s", - exData.TfName, - ) - updNameData := exData updNameData.Name = "name-updated" @@ -192,6 +199,7 @@ func TestAccInstance(t *testing.T) { PreCheck: func() { testAccPreCheck(t) t.Logf(" ... working on instance %s", exData.TfName) + testInstances = append(testInstances, exData.TfName) }, ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories, Steps: []resource.TestStep{ @@ -202,8 +210,8 @@ func TestAccInstance(t *testing.T) { exData, ), Check: resource.ComposeAggregateTestCheckFunc( - resource.TestCheckResourceAttr(resName, "name", exData.Name), - resource.TestCheckResourceAttrSet(resName, "id"), + resource.TestCheckResourceAttr(testutils.ResStr(pfx, "instance", exData.TfName), "name", exData.Name), + resource.TestCheckResourceAttrSet(testutils.ResStr(pfx, "instance", exData.TfName), "id"), ), }, // Update name and verify @@ -213,7 +221,7 @@ func TestAccInstance(t *testing.T) { updNameData, ), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(resName, "name", updNameData.Name), + resource.TestCheckResourceAttr(testutils.ResStr(pfx, "instance", exData.TfName), "name", updNameData.Name), ), }, // Update size and verify @@ -224,7 +232,7 @@ func TestAccInstance(t *testing.T) { ), Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttr( - resName, + testutils.ResStr(pfx, "instance", exData.TfName), "storage.size", strconv.Itoa(int(updSizeData.Size)), ), @@ -235,7 +243,7 @@ func TestAccInstance(t *testing.T) { // ResourceName: "example_resource.test", // ImportState: true, // ImportStateVerify: true, - //}, + // }, }, }) } @@ -256,6 +264,7 @@ func TestAccInstanceWithUsers(t *testing.T) { PreCheck: func() { testAccPreCheck(t) t.Logf(" ... working on instance %s", data.TfName) + testInstances = append(testInstances, data.TfName) }, ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories, Steps: []resource.TestStep{ @@ -279,7 +288,7 @@ func TestAccInstanceWithUsers(t *testing.T) { func TestAccInstanceWithDatabases(t *testing.T) { data := getExample() - dbName := "testDb" + dbName := "testdb" userName := "testUser" data.Users = []User{ { @@ -297,25 +306,11 @@ func TestAccInstanceWithDatabases(t *testing.T) { }, } - resName := fmt.Sprintf( - "stackitprivatepreview_postgresflexalpha_instance.%s", - data.TfName, - ) - - resUserName := fmt.Sprintf( - "stackitprivatepreview_postgresflexalpha_user.%s", - userName, - ) - - resDbName := fmt.Sprintf( - "stackitprivatepreview_postgresflexalpha_database.%s", - dbName, - ) - resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { testAccPreCheck(t) t.Logf(" ... working on instance %s", data.TfName) + testInstances = append(testInstances, data.TfName) }, ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories, Steps: []resource.TestStep{ @@ -326,20 +321,20 @@ func TestAccInstanceWithDatabases(t *testing.T) { data, ), Check: resource.ComposeAggregateTestCheckFunc( - resource.TestCheckResourceAttr(resName, "name", data.Name), - resource.TestCheckResourceAttrSet(resName, "id"), - resource.TestCheckResourceAttr(resUserName, "name", userName), - resource.TestCheckResourceAttrSet(resUserName, "id"), - resource.TestCheckResourceAttr(resDbName, "name", dbName), - resource.TestCheckResourceAttr(resDbName, "owner", userName), - resource.TestCheckResourceAttrSet(resDbName, "id"), + resource.TestCheckResourceAttr(testutils.ResStr(pfx, "instance", data.TfName), "name", data.Name), + resource.TestCheckResourceAttrSet(testutils.ResStr(pfx, "instance", data.TfName), "id"), + resource.TestCheckResourceAttr(testutils.ResStr(pfx, "user", userName), "name", userName), + resource.TestCheckResourceAttrSet(testutils.ResStr(pfx, "user", userName), "id"), + resource.TestCheckResourceAttr(testutils.ResStr(pfx, "database", dbName), "name", dbName), + resource.TestCheckResourceAttr(testutils.ResStr(pfx, "database", dbName), "owner", userName), + resource.TestCheckResourceAttrSet(testutils.ResStr(pfx, "database", dbName), "id"), ), }, }, }) } -//func setupMockServer() *httptest.Server { +// func setupMockServer() *httptest.Server { // mux := http.NewServeMux() // // mux.HandleFunc("/api/resources", func(w http.ResponseWriter, r *http.Request) { @@ -365,7 +360,7 @@ func TestAccInstanceWithDatabases(t *testing.T) { // return httptest.NewServer(mux) //} // -//func TestUnitResourceCreate(t *testing.T) { +// func TestUnitResourceCreate(t *testing.T) { // server := setupMockServer() // defer server.Close() // @@ -391,7 +386,7 @@ func TestAccInstanceWithDatabases(t *testing.T) { // return c.getFlavorsResp, nil // } -//func TestNewInstanceResource(t *testing.T) { +// func TestNewInstanceResource(t *testing.T) { // exData := resData{ // Region: "eu01", // ServiceAccountFilePath: sa_file, @@ -430,7 +425,7 @@ func TestAccInstanceWithDatabases(t *testing.T) { //} //// Instance resource data -//var instanceResource = map[string]string{ +// var instanceResource = map[string]string{ // "project_id": testutils.ProjectId, // "region": "eu01", // "name": fmt.Sprintf("tf-acc-%s", acctest.RandStringFromCharSet(7, acctest.CharSetAlphaNum)), @@ -454,14 +449,14 @@ func TestAccInstanceWithDatabases(t *testing.T) { //} // //// User resource data -//var userResource = map[string]string{ +// var userResource = map[string]string{ // "username": fmt.Sprintf("tfaccuser%s", acctest.RandStringFromCharSet(4, acctest.CharSetAlpha)), // "role": "createdb", // "project_id": testutils.ProjectId, //} // //// Database resource data -//var databaseResource = map[string]string{ +// var databaseResource = map[string]string{ // "name": fmt.Sprintf("tfaccdb%s", acctest.RandStringFromCharSet(4, acctest.CharSetAlphaNum)), // "project_id": testutils.ProjectId, //} diff --git a/stackit/internal/services/postgresflexalpha/user/datasource.go b/stackit/internal/services/postgresflexalpha/user/datasource.go index b0cf9d3b..0bb991df 100644 --- a/stackit/internal/services/postgresflexalpha/user/datasource.go +++ b/stackit/internal/services/postgresflexalpha/user/datasource.go @@ -8,6 +8,7 @@ import ( "github.com/hashicorp/terraform-plugin-framework/datasource/schema" "github.com/hashicorp/terraform-plugin-framework/diag" + postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" postgresflexalpha "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/user/datasources_gen" @@ -16,6 +17,7 @@ import ( "github.com/hashicorp/terraform-plugin-framework/datasource" "github.com/hashicorp/terraform-plugin-framework/types" "github.com/hashicorp/terraform-plugin-log/tflog" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" ) @@ -106,7 +108,7 @@ func (r *userDataSource) Read( core.LogAndAddError(ctx, &resp.Diagnostics, "Error in type conversion", "int value too large (userId)") return } - userId := int32(userId64) + userId := int32(userId64) // nolint:gosec // check is performed above region := r.providerData.GetRegionWithOverride(model.Region) ctx = tflog.SetField(ctx, "project_id", projectId) diff --git a/stackit/internal/services/postgresflexalpha/user/mapper.go b/stackit/internal/services/postgresflexalpha/user/mapper.go index 8fb4278f..952235ca 100644 --- a/stackit/internal/services/postgresflexalpha/user/mapper.go +++ b/stackit/internal/services/postgresflexalpha/user/mapper.go @@ -6,6 +6,7 @@ import ( "github.com/hashicorp/terraform-plugin-framework/attr" "github.com/hashicorp/terraform-plugin-framework/types" + postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" diff --git a/stackit/internal/services/postgresflexalpha/user/mapper_test.go b/stackit/internal/services/postgresflexalpha/user/mapper_test.go index 37410b19..06ba3d50 100644 --- a/stackit/internal/services/postgresflexalpha/user/mapper_test.go +++ b/stackit/internal/services/postgresflexalpha/user/mapper_test.go @@ -7,6 +7,7 @@ import ( "github.com/hashicorp/terraform-plugin-framework/attr" "github.com/hashicorp/terraform-plugin-framework/types" "github.com/stackitcloud/stackit-sdk-go/core/utils" + postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha" data "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/user/datasources_gen" ) diff --git a/stackit/internal/services/postgresflexalpha/user/resource.go b/stackit/internal/services/postgresflexalpha/user/resource.go index 1e50a0fe..6c7e3979 100644 --- a/stackit/internal/services/postgresflexalpha/user/resource.go +++ b/stackit/internal/services/postgresflexalpha/user/resource.go @@ -11,6 +11,7 @@ import ( "github.com/hashicorp/terraform-plugin-framework/diag" "github.com/hashicorp/terraform-plugin-framework/resource/identityschema" + postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha" postgresflexalpha "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/user/resources_gen" postgresflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/utils" @@ -20,6 +21,7 @@ import ( "github.com/hashicorp/terraform-plugin-framework/resource" "github.com/hashicorp/terraform-plugin-framework/types" "github.com/hashicorp/terraform-plugin-log/tflog" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" @@ -404,7 +406,7 @@ func (r *userResource) Update( core.LogAndAddError(ctx, &resp.Diagnostics, "Error in type conversion", "int value too large (userId)") return } - userId := int32(userId64) + userId := int32(userId64) // nolint:gosec // check is performed above // Update existing instance err = r.client.UpdateUserRequest( @@ -516,7 +518,7 @@ func (r *userResource) Delete( core.LogAndAddError(ctx, &resp.Diagnostics, "Error in type conversion", "int value too large (userId)") return } - userId := int32(userId64) + userId := int32(userId64) // nolint:gosec // check is performed above // Delete existing record set err := r.client.DeleteUserRequest(ctx, arg.projectId, arg.region, arg.instanceId, userId).Execute() @@ -527,12 +529,12 @@ func (r *userResource) Delete( ctx = core.LogResponse(ctx) // TODO: Verify deletion - //exists, err := r.getUserResource(ctx, &model, arg) - //if err != nil { + // exists, err := r.getUserResource(ctx, &model, arg) + // if err != nil { // core.LogAndAddError(ctx, &resp.Diagnostics, "Error deleting user", fmt.Sprintf("Calling API: %v", err)) // return //} - //if exists { + // if exists { // core.LogAndAddError( // ctx, &resp.Diagnostics, "Error deleting user", // fmt.Sprintf("User ID '%v' resource still exists after deletion", model.UserId.ValueInt64()), diff --git a/stackit/internal/services/postgresflexalpha/utils/util.go b/stackit/internal/services/postgresflexalpha/utils/util.go index 7d6c721a..2b6d1de8 100644 --- a/stackit/internal/services/postgresflexalpha/utils/util.go +++ b/stackit/internal/services/postgresflexalpha/utils/util.go @@ -8,6 +8,7 @@ import ( "github.com/hashicorp/terraform-plugin-framework/diag" "github.com/stackitcloud/stackit-sdk-go/core/config" + postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" diff --git a/stackit/internal/services/postgresflexalpha/utils/util_test.go b/stackit/internal/services/postgresflexalpha/utils/util_test.go index 185ece19..e0f7a829 100644 --- a/stackit/internal/services/postgresflexalpha/utils/util_test.go +++ b/stackit/internal/services/postgresflexalpha/utils/util_test.go @@ -11,6 +11,7 @@ import ( "github.com/hashicorp/terraform-plugin-framework/diag" sdkClients "github.com/stackitcloud/stackit-sdk-go/core/clients" "github.com/stackitcloud/stackit-sdk-go/core/config" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" diff --git a/stackit/internal/services/sqlserverflexalpha/database/datasource.go b/stackit/internal/services/sqlserverflexalpha/database/datasource.go index 176f3d35..05c8c1ec 100644 --- a/stackit/internal/services/sqlserverflexalpha/database/datasource.go +++ b/stackit/internal/services/sqlserverflexalpha/database/datasource.go @@ -10,6 +10,7 @@ import ( "github.com/hashicorp/terraform-plugin-framework/diag" "github.com/hashicorp/terraform-plugin-framework/types" "github.com/hashicorp/terraform-plugin-log/tflog" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" diff --git a/stackit/internal/services/sqlserverflexalpha/database/mapper.go b/stackit/internal/services/sqlserverflexalpha/database/mapper.go index 05376158..1a77c31e 100644 --- a/stackit/internal/services/sqlserverflexalpha/database/mapper.go +++ b/stackit/internal/services/sqlserverflexalpha/database/mapper.go @@ -5,6 +5,7 @@ import ( "strings" "github.com/hashicorp/terraform-plugin-framework/types" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" ) diff --git a/stackit/internal/services/sqlserverflexalpha/database/mapper_test.go b/stackit/internal/services/sqlserverflexalpha/database/mapper_test.go index 992a3878..bde90f6a 100644 --- a/stackit/internal/services/sqlserverflexalpha/database/mapper_test.go +++ b/stackit/internal/services/sqlserverflexalpha/database/mapper_test.go @@ -6,6 +6,7 @@ import ( "github.com/google/go-cmp/cmp" "github.com/hashicorp/terraform-plugin-framework/types" "github.com/stackitcloud/stackit-sdk-go/core/utils" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha" datasource "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/database/datasources_gen" ) diff --git a/stackit/internal/services/sqlserverflexalpha/database/resource.go b/stackit/internal/services/sqlserverflexalpha/database/resource.go index 0ffba164..c9ac17e2 100644 --- a/stackit/internal/services/sqlserverflexalpha/database/resource.go +++ b/stackit/internal/services/sqlserverflexalpha/database/resource.go @@ -16,6 +16,7 @@ import ( "github.com/hashicorp/terraform-plugin-log/tflog" "github.com/stackitcloud/stackit-sdk-go/core/config" "github.com/stackitcloud/stackit-sdk-go/core/oapierror" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" diff --git a/stackit/internal/services/sqlserverflexalpha/flavor/functions_test.go b/stackit/internal/services/sqlserverflexalpha/flavor/functions_test.go index 0246d866..af3b370a 100644 --- a/stackit/internal/services/sqlserverflexalpha/flavor/functions_test.go +++ b/stackit/internal/services/sqlserverflexalpha/flavor/functions_test.go @@ -5,6 +5,7 @@ import ( "testing" "github.com/stackitcloud/stackit-sdk-go/core/utils" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha" ) diff --git a/stackit/internal/services/sqlserverflexalpha/flavors/datasource.go b/stackit/internal/services/sqlserverflexalpha/flavors/datasource.go index c1d4de36..8d5e8a50 100644 --- a/stackit/internal/services/sqlserverflexalpha/flavors/datasource.go +++ b/stackit/internal/services/sqlserverflexalpha/flavors/datasource.go @@ -5,6 +5,7 @@ import ( "github.com/hashicorp/terraform-plugin-framework/datasource" "github.com/hashicorp/terraform-plugin-log/tflog" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" diff --git a/stackit/internal/services/sqlserverflexalpha/instance/datasource.go b/stackit/internal/services/sqlserverflexalpha/instance/datasource.go index 0d58140c..70c382df 100644 --- a/stackit/internal/services/sqlserverflexalpha/instance/datasource.go +++ b/stackit/internal/services/sqlserverflexalpha/instance/datasource.go @@ -9,6 +9,7 @@ import ( "github.com/hashicorp/terraform-plugin-framework/datasource/schema" "github.com/hashicorp/terraform-plugin-framework/types" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" sqlserverflexalpha "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/instance/datasources_gen" sqlserverflexalpha2 "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/instance/resources_gen" @@ -18,6 +19,7 @@ import ( "github.com/hashicorp/terraform-plugin-framework/datasource" "github.com/hashicorp/terraform-plugin-log/tflog" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" ) @@ -123,8 +125,8 @@ func (r *instanceDataSource) Read( ctx = core.LogResponse(ctx) - //var storage = &storageModel{} - //if !model.Storage.IsNull() && !model.Storage.IsUnknown() { + // var storage = &storageModel{} + // if !model.Storage.IsNull() && !model.Storage.IsUnknown() { // diags = model.Storage.As(ctx, storage, basetypes.ObjectAsOptions{}) // resp.Diagnostics.Append(diags...) // if resp.Diagnostics.HasError() { @@ -132,7 +134,7 @@ func (r *instanceDataSource) Read( // } //} // - //var encryption = &encryptionModel{} + // var encryption = &encryptionModel{} //if !model.Encryption.IsNull() && !model.Encryption.IsUnknown() { // diags = model.Encryption.As(ctx, encryption, basetypes.ObjectAsOptions{}) // resp.Diagnostics.Append(diags...) @@ -151,7 +153,7 @@ func (r *instanceDataSource) Read( //} err = mapFields(ctx, instanceResp, &model, resp.Diagnostics) - //err = mapFields(ctx, instanceResp, &model, storage, encryption, network, region) + // err = mapFields(ctx, instanceResp, &model, storage, encryption, network, region) if err != nil { core.LogAndAddError( ctx, diff --git a/stackit/internal/services/sqlserverflexalpha/instance/functions.go b/stackit/internal/services/sqlserverflexalpha/instance/functions.go index 77effc6c..4c091eff 100644 --- a/stackit/internal/services/sqlserverflexalpha/instance/functions.go +++ b/stackit/internal/services/sqlserverflexalpha/instance/functions.go @@ -9,6 +9,7 @@ import ( "github.com/hashicorp/terraform-plugin-framework/diag" "github.com/hashicorp/terraform-plugin-framework/resource" "github.com/hashicorp/terraform-plugin-framework/types" + sqlserverflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" sqlserverflexResGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/instance/resources_gen" @@ -190,7 +191,7 @@ func toUpdatePayload( if m.Replicas.ValueInt64() > math.MaxUint32 { return nil, fmt.Errorf("replicas value is too big for uint32") } - replVal := sqlserverflex.Replicas(uint32(m.Replicas.ValueInt64())) + replVal := sqlserverflex.Replicas(uint32(m.Replicas.ValueInt64())) // nolint:gosec // check is performed above var netAcl []string diags := m.Network.Acl.ElementsAs(ctx, &netAcl, false) diff --git a/stackit/internal/services/sqlserverflexalpha/instance/resource.go b/stackit/internal/services/sqlserverflexalpha/instance/resource.go index 76c73639..4e4dd4ca 100644 --- a/stackit/internal/services/sqlserverflexalpha/instance/resource.go +++ b/stackit/internal/services/sqlserverflexalpha/instance/resource.go @@ -11,6 +11,7 @@ import ( "time" "github.com/hashicorp/terraform-plugin-framework/resource/identityschema" + sqlserverflexalpha2 "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/instance/resources_gen" sqlserverflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/utils" @@ -18,6 +19,7 @@ import ( "github.com/hashicorp/terraform-plugin-framework/resource" "github.com/hashicorp/terraform-plugin-framework/types" "github.com/hashicorp/terraform-plugin-log/tflog" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" diff --git a/stackit/internal/services/sqlserverflexalpha/instance/resource_test.go b/stackit/internal/services/sqlserverflexalpha/instance/resource_test.go index 175711ff..bc9f6d3a 100644 --- a/stackit/internal/services/sqlserverflexalpha/instance/resource_test.go +++ b/stackit/internal/services/sqlserverflexalpha/instance/resource_test.go @@ -13,7 +13,7 @@ package sqlserverflexalpha // return c.listFlavorsResp, nil // } -//func TestMapFields(t *testing.T) { +// func TestMapFields(t *testing.T) { // t.Skip("Skipping - needs refactoring") // const testRegion = "region" // tests := []struct { diff --git a/stackit/internal/services/sqlserverflexalpha/sqlserverflex_acc_test.go b/stackit/internal/services/sqlserverflexalpha/sqlserverflex_acc_test.go index 6c2915ea..974e724c 100644 --- a/stackit/internal/services/sqlserverflexalpha/sqlserverflex_acc_test.go +++ b/stackit/internal/services/sqlserverflexalpha/sqlserverflex_acc_test.go @@ -10,6 +10,7 @@ import ( "github.com/hashicorp/terraform-plugin-testing/helper/acctest" "github.com/hashicorp/terraform-plugin-testing/helper/resource" + sqlserverflexalpha "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/instance" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/internal/testutils" @@ -166,7 +167,7 @@ func TestAccInstance(t *testing.T) { // ResourceName: "example_resource.test", // ImportState: true, // ImportStateVerify: true, - //}, + // }, }, }) } diff --git a/stackit/internal/services/sqlserverflexalpha/user/datasource.go b/stackit/internal/services/sqlserverflexalpha/user/datasource.go index d76e27a4..95c517ce 100644 --- a/stackit/internal/services/sqlserverflexalpha/user/datasource.go +++ b/stackit/internal/services/sqlserverflexalpha/user/datasource.go @@ -11,6 +11,7 @@ import ( "github.com/hashicorp/terraform-plugin-framework/schema/validator" "github.com/hashicorp/terraform-plugin-framework/types" "github.com/hashicorp/terraform-plugin-log/tflog" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" diff --git a/stackit/internal/services/sqlserverflexalpha/user/mapper.go b/stackit/internal/services/sqlserverflexalpha/user/mapper.go index 0b7baa84..398011be 100644 --- a/stackit/internal/services/sqlserverflexalpha/user/mapper.go +++ b/stackit/internal/services/sqlserverflexalpha/user/mapper.go @@ -6,6 +6,7 @@ import ( "github.com/hashicorp/terraform-plugin-framework/attr" "github.com/hashicorp/terraform-plugin-framework/types" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" diff --git a/stackit/internal/services/sqlserverflexalpha/user/mapper_test.go b/stackit/internal/services/sqlserverflexalpha/user/mapper_test.go index e450b581..46c8418d 100644 --- a/stackit/internal/services/sqlserverflexalpha/user/mapper_test.go +++ b/stackit/internal/services/sqlserverflexalpha/user/mapper_test.go @@ -7,6 +7,7 @@ import ( "github.com/hashicorp/terraform-plugin-framework/attr" "github.com/hashicorp/terraform-plugin-framework/types" "github.com/stackitcloud/stackit-sdk-go/core/utils" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha" ) diff --git a/stackit/internal/services/sqlserverflexalpha/user/resource.go b/stackit/internal/services/sqlserverflexalpha/user/resource.go index 52dee432..64fefde3 100644 --- a/stackit/internal/services/sqlserverflexalpha/user/resource.go +++ b/stackit/internal/services/sqlserverflexalpha/user/resource.go @@ -10,6 +10,7 @@ import ( "strings" "github.com/hashicorp/terraform-plugin-framework/resource/identityschema" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha" sqlserverflexalphagen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/user/resources_gen" sqlserverflexalphaUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/utils" @@ -20,6 +21,7 @@ import ( "github.com/hashicorp/terraform-plugin-framework/types" "github.com/hashicorp/terraform-plugin-log/tflog" "github.com/stackitcloud/stackit-sdk-go/core/oapierror" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" @@ -368,7 +370,7 @@ func (r *userResource) Delete( // Delete existing record set _, err := sqlserverflexalphaWait.DeleteUserWaitHandler(ctx, r.client, projectId, region, instanceId, userId). WaitWithContext(ctx) - //err := r.client.DeleteUserRequest(ctx, arg.projectId, arg.region, arg.instanceId, userId).Execute() + // err := r.client.DeleteUserRequest(ctx, arg.projectId, arg.region, arg.instanceId, userId).Execute() if err != nil { core.LogAndAddError(ctx, &resp.Diagnostics, "User Delete Error", fmt.Sprintf("Calling API: %v", err)) return diff --git a/stackit/internal/services/sqlserverflexalpha/utils/util.go b/stackit/internal/services/sqlserverflexalpha/utils/util.go index db031162..7fbf0901 100644 --- a/stackit/internal/services/sqlserverflexalpha/utils/util.go +++ b/stackit/internal/services/sqlserverflexalpha/utils/util.go @@ -8,6 +8,7 @@ import ( "github.com/hashicorp/terraform-plugin-framework/diag" "github.com/stackitcloud/stackit-sdk-go/core/config" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" ) diff --git a/stackit/internal/services/sqlserverflexalpha/utils/util_test.go b/stackit/internal/services/sqlserverflexalpha/utils/util_test.go index 7afd6b1d..91f90030 100644 --- a/stackit/internal/services/sqlserverflexalpha/utils/util_test.go +++ b/stackit/internal/services/sqlserverflexalpha/utils/util_test.go @@ -9,6 +9,7 @@ import ( "github.com/hashicorp/terraform-plugin-framework/diag" sdkClients "github.com/stackitcloud/stackit-sdk-go/core/clients" "github.com/stackitcloud/stackit-sdk-go/core/config" + sqlserverflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" diff --git a/stackit/internal/services/sqlserverflexbeta/database/datasource.go b/stackit/internal/services/sqlserverflexbeta/database/datasource.go index 66d5ad0d..401df291 100644 --- a/stackit/internal/services/sqlserverflexbeta/database/datasource.go +++ b/stackit/internal/services/sqlserverflexbeta/database/datasource.go @@ -11,6 +11,7 @@ import ( "github.com/hashicorp/terraform-plugin-framework/types" "github.com/hashicorp/terraform-plugin-log/tflog" "github.com/stackitcloud/stackit-sdk-go/core/config" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" diff --git a/stackit/internal/services/sqlserverflexbeta/database/mapper.go b/stackit/internal/services/sqlserverflexbeta/database/mapper.go index 7d82039a..a418dadb 100644 --- a/stackit/internal/services/sqlserverflexbeta/database/mapper.go +++ b/stackit/internal/services/sqlserverflexbeta/database/mapper.go @@ -5,6 +5,7 @@ import ( "strings" "github.com/hashicorp/terraform-plugin-framework/types" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" ) diff --git a/stackit/internal/services/sqlserverflexbeta/database/mapper_test.go b/stackit/internal/services/sqlserverflexbeta/database/mapper_test.go index 04125a7b..f865f22f 100644 --- a/stackit/internal/services/sqlserverflexbeta/database/mapper_test.go +++ b/stackit/internal/services/sqlserverflexbeta/database/mapper_test.go @@ -6,6 +6,7 @@ import ( "github.com/google/go-cmp/cmp" "github.com/hashicorp/terraform-plugin-framework/types" "github.com/stackitcloud/stackit-sdk-go/core/utils" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta" datasource "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/database/datasources_gen" ) @@ -34,7 +35,7 @@ func TestMapFields(t *testing.T) { Name: utils.Ptr("my-db"), CollationName: utils.Ptr("collation"), CompatibilityLevel: utils.Ptr(int64(150)), - Owner: utils.Ptr("\"my-owner\""), + Owner: utils.Ptr("my-owner"), }, model: &dataSourceModel{ DatabaseModel: datasource.DatabaseModel{ @@ -126,7 +127,7 @@ func TestMapResourceFields(t *testing.T) { source: &sqlserverflexbeta.GetDatabaseResponse{ Id: utils.Ptr(int64(1)), Name: utils.Ptr("my-db"), - Owner: utils.Ptr("\"my-owner\""), + Owner: utils.Ptr("my-owner"), }, model: &resourceModel{ ProjectId: types.StringValue("my-project"), @@ -136,13 +137,17 @@ func TestMapResourceFields(t *testing.T) { }, expected: expected{ model: &resourceModel{ - Id: types.Int64Value(1), - Name: types.StringValue("my-db"), - DatabaseName: types.StringValue("my-db"), - InstanceId: types.StringValue("my-instance"), - ProjectId: types.StringValue("my-project"), - Region: types.StringValue("eu01"), - Owner: types.StringValue("my-owner"), + Id: types.Int64Value(1), + Name: types.StringValue("my-db"), + Compatibility: types.Int64Value(0), + CompatibilityLevel: types.Int64Value(0), + Collation: types.StringValue(""), + CollationName: types.StringValue(""), + DatabaseName: types.StringValue("my-db"), + InstanceId: types.StringValue("my-instance"), + ProjectId: types.StringValue("my-project"), + Region: types.StringValue("eu01"), + Owner: types.StringValue("my-owner"), }, }, }, diff --git a/stackit/internal/services/sqlserverflexbeta/database/resource.go b/stackit/internal/services/sqlserverflexbeta/database/resource.go index 7c191e42..fd970722 100644 --- a/stackit/internal/services/sqlserverflexbeta/database/resource.go +++ b/stackit/internal/services/sqlserverflexbeta/database/resource.go @@ -16,6 +16,7 @@ import ( "github.com/hashicorp/terraform-plugin-log/tflog" "github.com/stackitcloud/stackit-sdk-go/core/config" "github.com/stackitcloud/stackit-sdk-go/core/oapierror" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" wait "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/wait/sqlserverflexbeta" @@ -247,7 +248,7 @@ func (r *databaseResource) Create(ctx context.Context, req resource.CreateReques return } - // TODO: is this neccessary to wait for the database-> API say 200 ? + // TODO: is this necessary to wait for the database-> API say 200 ? waitResp, err := wait.CreateDatabaseWaitHandler( ctx, r.client, diff --git a/stackit/internal/services/sqlserverflexbeta/flavor/datasource.go b/stackit/internal/services/sqlserverflexbeta/flavor/datasource.go index 59f3982c..06e055f2 100644 --- a/stackit/internal/services/sqlserverflexbeta/flavor/datasource.go +++ b/stackit/internal/services/sqlserverflexbeta/flavor/datasource.go @@ -254,7 +254,7 @@ func (r *flavorDataSource) Schema(ctx context.Context, _ datasource.SchemaReques // }, // }, // }, - //}, + // }, } } diff --git a/stackit/internal/services/sqlserverflexbeta/flavor/functions_test.go b/stackit/internal/services/sqlserverflexbeta/flavor/functions_test.go index 974f1fa4..fb666253 100644 --- a/stackit/internal/services/sqlserverflexbeta/flavor/functions_test.go +++ b/stackit/internal/services/sqlserverflexbeta/flavor/functions_test.go @@ -5,6 +5,7 @@ import ( "testing" "github.com/stackitcloud/stackit-sdk-go/core/utils" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta" ) diff --git a/stackit/internal/services/sqlserverflexbeta/flavors/datasource.go b/stackit/internal/services/sqlserverflexbeta/flavors/datasource.go index b401e4ff..b6be1dd4 100644 --- a/stackit/internal/services/sqlserverflexbeta/flavors/datasource.go +++ b/stackit/internal/services/sqlserverflexbeta/flavors/datasource.go @@ -10,6 +10,7 @@ import ( "github.com/hashicorp/terraform-plugin-framework/types" "github.com/hashicorp/terraform-plugin-log/tflog" "github.com/stackitcloud/stackit-sdk-go/core/config" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" @@ -111,7 +112,7 @@ func (d *flavorsDataSource) Read(ctx context.Context, req datasource.ReadRequest projectId := data.ProjectId.ValueString() region := d.providerData.GetRegionWithOverride(data.Region) // TODO: implement right identifier for flavors - flavorsId := data.FlavorsModel.Flavors + flavorsId := data.Flavors ctx = tflog.SetField(ctx, "project_id", projectId) ctx = tflog.SetField(ctx, "region", region) diff --git a/stackit/internal/services/sqlserverflexbeta/instance/datasource.go b/stackit/internal/services/sqlserverflexbeta/instance/datasource.go index 85834b26..2830ac62 100644 --- a/stackit/internal/services/sqlserverflexbeta/instance/datasource.go +++ b/stackit/internal/services/sqlserverflexbeta/instance/datasource.go @@ -9,6 +9,7 @@ import ( "github.com/hashicorp/terraform-plugin-framework/types" "github.com/hashicorp/terraform-plugin-log/tflog" "github.com/stackitcloud/stackit-sdk-go/core/config" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" diff --git a/stackit/internal/services/sqlserverflexbeta/instance/functions.go b/stackit/internal/services/sqlserverflexbeta/instance/functions.go index ee4d30b4..de2dd9d6 100644 --- a/stackit/internal/services/sqlserverflexbeta/instance/functions.go +++ b/stackit/internal/services/sqlserverflexbeta/instance/functions.go @@ -10,6 +10,7 @@ import ( "github.com/hashicorp/terraform-plugin-framework/diag" "github.com/hashicorp/terraform-plugin-framework/resource" "github.com/hashicorp/terraform-plugin-framework/types" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" sqlserverflexbetaDataGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/instance/datasources_gen" @@ -262,7 +263,7 @@ func toUpdatePayload( if m.Replicas.ValueInt64() > math.MaxUint32 { return nil, fmt.Errorf("replicas value is too big for uint32") } - replVal := sqlserverflexbeta.Replicas(uint32(m.Replicas.ValueInt64())) + replVal := sqlserverflexbeta.Replicas(uint32(m.Replicas.ValueInt64())) // nolint:gosec // check is performed above var netAcl []string diags := m.Network.Acl.ElementsAs(ctx, &netAcl, false) diff --git a/stackit/internal/services/sqlserverflexbeta/instance/resource.go b/stackit/internal/services/sqlserverflexbeta/instance/resource.go index 6abcbbd3..c04c1fd3 100644 --- a/stackit/internal/services/sqlserverflexbeta/instance/resource.go +++ b/stackit/internal/services/sqlserverflexbeta/instance/resource.go @@ -15,6 +15,7 @@ import ( "github.com/hashicorp/terraform-plugin-log/tflog" "github.com/stackitcloud/stackit-sdk-go/core/config" "github.com/stackitcloud/stackit-sdk-go/core/oapierror" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" wait "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/wait/sqlserverflexbeta" diff --git a/stackit/internal/services/sqlserverflexbeta/sqlserverflex_acc_test.go b/stackit/internal/services/sqlserverflexbeta/sqlserverflex_acc_test.go index 4c63267d..e3fd5473 100644 --- a/stackit/internal/services/sqlserverflexbeta/sqlserverflex_acc_test.go +++ b/stackit/internal/services/sqlserverflexbeta/sqlserverflex_acc_test.go @@ -12,11 +12,12 @@ import ( "github.com/hashicorp/terraform-plugin-testing/helper/acctest" "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-plugin-testing/terraform" "github.com/stackitcloud/stackit-sdk-go/core/config" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/internal/testutils" sqlserverflexbeta2 "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta" sqlserverflexbeta "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/instance" + // The fwresource import alias is so there is no collision // with the more typical acceptance testing import: // "github.com/hashicorp/terraform-plugin-testing/helper/resource" @@ -25,6 +26,8 @@ import ( const providerPrefix = "stackitprivatepreview_sqlserverflexbeta" +var testInstances []string + func init() { sweeperName := fmt.Sprintf("%s_%s", providerPrefix, "sweeper") @@ -38,7 +41,7 @@ func init() { log.Fatalln(err) } - instances, err := apiClient.ListInstancesRequest(ctx, testutils.ProjectId, testutils.Region). + instances, err := apiClient.ListInstancesRequest(ctx, testutils.ProjectId, region). Size(100). Execute() if err != nil { @@ -47,9 +50,14 @@ func init() { for _, inst := range instances.GetInstances() { if strings.HasPrefix(inst.GetName(), "tf-acc-") { - delErr := apiClient.DeleteInstanceRequestExecute(ctx, testutils.ProjectId, testutils.Region, inst.GetId()) - if delErr != nil { - log.Fatalln(delErr) + for _, item := range testInstances { + if inst.GetName() == item { + delErr := apiClient.DeleteInstanceRequestExecute(ctx, testutils.ProjectId, region, inst.GetId()) + if delErr != nil { + // TODO: maybe just warn? + log.Fatalln(delErr) + } + } } } } @@ -167,6 +175,7 @@ func TestAccInstance(t *testing.T) { PreCheck: func() { testAccPreCheck(t) t.Logf(" ... working on instance %s", exData.TfName) + testInstances = append(testInstances, exData.TfName) }, ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories, Steps: []resource.TestStep{ @@ -179,6 +188,7 @@ func TestAccInstance(t *testing.T) { Check: resource.ComposeAggregateTestCheckFunc( resource.TestCheckResourceAttr(resName("instance", exData.TfName), "name", exData.Name), resource.TestCheckResourceAttrSet(resName("instance", exData.TfName), "id"), + // TODO: check all fields ), }, // Update name and verify @@ -199,7 +209,7 @@ func TestAccInstance(t *testing.T) { ), Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttr( - resName("instance", exData.TfName), + testutils.ResStr(providerPrefix, "instance", exData.TfName), "storage.size", strconv.Itoa(int(updSizeData.Size)), ), @@ -210,10 +220,10 @@ func TestAccInstance(t *testing.T) { }, //// Import test //{ - // ResourceName: "example_resource.test", + // ResourceName: resName("instance", exData.TfName), // ImportState: true, // ImportStateVerify: true, - //}, + // }, }, }) } @@ -249,6 +259,7 @@ func TestAccInstanceNoEncryption(t *testing.T) { PreCheck: func() { testAccPreCheck(t) t.Logf(" ... working on instance %s", data.TfName) + testInstances = append(testInstances, data.TfName) }, ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories, Steps: []resource.TestStep{ @@ -274,20 +285,20 @@ func TestAccInstanceNoEncryption(t *testing.T) { resource.TestCheckNoResourceAttr(resName("instance", data.TfName), "encryption"), - //resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption"), - //resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_id"), - //resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_version"), + // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption"), + // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_id"), + // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_version"), //resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_ring_id"), //resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.service_account"), - //resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.access_scope"), - //resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.acl"), + // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.access_scope"), + // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.acl"), - //resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.instance_address"), - //resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.router_address"), + // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.instance_address"), + // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.router_address"), - //resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "storage.class"), - //resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "storage.size"), + // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "storage.class"), + // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "storage.size"), // check instance values are correct resource.TestCheckResourceAttr(resName("instance", data.TfName), "name", data.Name), @@ -297,9 +308,9 @@ func TestAccInstanceNoEncryption(t *testing.T) { resource.TestCheckResourceAttrSet(resName("user", userName), "username"), // resource.TestCheckResourceAttrSet(resName("user", userName), "roles"), - func(s *terraform.State) error { - return nil - }, + // func(s *terraform.State) error { + // return nil + // }, // check user values are correct resource.TestCheckResourceAttr(resName("user", userName), "username", userName), @@ -351,6 +362,7 @@ func TestAccInstanceEncryption(t *testing.T) { PreCheck: func() { testAccPreCheck(t) t.Logf(" ... working on instance %s", data.TfName) + testInstances = append(testInstances, data.TfName) }, ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories, Steps: []resource.TestStep{ @@ -374,20 +386,20 @@ func TestAccInstanceEncryption(t *testing.T) { resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "status"), resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "version"), - //resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption"), - //resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_id"), - //resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_version"), + // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption"), + // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_id"), + // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_version"), //resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_ring_id"), //resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.service_account"), - //resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.access_scope"), - //resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.acl"), + // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.access_scope"), + // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.acl"), - //resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.instance_address"), - //resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.router_address"), + // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.instance_address"), + // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.router_address"), - //resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "storage.class"), - //resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "storage.size"), + // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "storage.class"), + // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "storage.size"), // check instance values are correct resource.TestCheckResourceAttr(resName("instance", data.TfName), "name", data.Name), @@ -396,9 +408,9 @@ func TestAccInstanceEncryption(t *testing.T) { resource.TestCheckResourceAttrSet(resName("user", userName), "id"), resource.TestCheckResourceAttrSet(resName("user", userName), "username"), - func(s *terraform.State) error { - return nil - }, + // func(s *terraform.State) error { + // return nil + // }, // check user values are correct resource.TestCheckResourceAttr(resName("user", userName), "username", userName), diff --git a/stackit/internal/services/sqlserverflexbeta/user/datasource.go b/stackit/internal/services/sqlserverflexbeta/user/datasource.go index e15ad28f..8457972a 100644 --- a/stackit/internal/services/sqlserverflexbeta/user/datasource.go +++ b/stackit/internal/services/sqlserverflexbeta/user/datasource.go @@ -8,6 +8,7 @@ import ( "github.com/hashicorp/terraform-plugin-framework/datasource" "github.com/hashicorp/terraform-plugin-framework/types" "github.com/hashicorp/terraform-plugin-log/tflog" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" sqlserverflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/utils" diff --git a/stackit/internal/services/sqlserverflexbeta/user/mapper.go b/stackit/internal/services/sqlserverflexbeta/user/mapper.go index 59c915bb..9115906f 100644 --- a/stackit/internal/services/sqlserverflexbeta/user/mapper.go +++ b/stackit/internal/services/sqlserverflexbeta/user/mapper.go @@ -6,6 +6,7 @@ import ( "github.com/hashicorp/terraform-plugin-framework/attr" "github.com/hashicorp/terraform-plugin-framework/types" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" diff --git a/stackit/internal/services/sqlserverflexbeta/user/mapper_test.go b/stackit/internal/services/sqlserverflexbeta/user/mapper_test.go index 7b23805c..1fc1f36e 100644 --- a/stackit/internal/services/sqlserverflexbeta/user/mapper_test.go +++ b/stackit/internal/services/sqlserverflexbeta/user/mapper_test.go @@ -7,6 +7,7 @@ import ( "github.com/hashicorp/terraform-plugin-framework/attr" "github.com/hashicorp/terraform-plugin-framework/types" "github.com/stackitcloud/stackit-sdk-go/core/utils" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta" ) diff --git a/stackit/internal/services/sqlserverflexbeta/user/resource.go b/stackit/internal/services/sqlserverflexbeta/user/resource.go index a31c380b..27b198e6 100644 --- a/stackit/internal/services/sqlserverflexbeta/user/resource.go +++ b/stackit/internal/services/sqlserverflexbeta/user/resource.go @@ -16,6 +16,7 @@ import ( "github.com/hashicorp/terraform-plugin-framework/types" "github.com/hashicorp/terraform-plugin-log/tflog" "github.com/stackitcloud/stackit-sdk-go/core/oapierror" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" sqlserverflexbetagen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/user/resources_gen" @@ -432,7 +433,7 @@ func (r *userResource) Delete( // Delete existing record set _, err = sqlserverflexbetaWait.DeleteUserWaitHandler(ctx, r.client, projectId, region, instanceId, userId). WaitWithContext(ctx) - //err := r.client.DeleteUserRequest(ctx, arg.projectId, arg.region, arg.instanceId, userId).Execute() + // err := r.client.DeleteUserRequest(ctx, arg.projectId, arg.region, arg.instanceId, userId).Execute() if err != nil { core.LogAndAddError(ctx, &resp.Diagnostics, "User Delete Error", fmt.Sprintf("Calling API: %v", err)) return diff --git a/stackit/internal/services/sqlserverflexbeta/utils/util.go b/stackit/internal/services/sqlserverflexbeta/utils/util.go index df638d8c..d8ba984b 100644 --- a/stackit/internal/services/sqlserverflexbeta/utils/util.go +++ b/stackit/internal/services/sqlserverflexbeta/utils/util.go @@ -8,6 +8,7 @@ import ( "github.com/hashicorp/terraform-plugin-framework/diag" "github.com/stackitcloud/stackit-sdk-go/core/config" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" ) diff --git a/stackit/internal/services/sqlserverflexbeta/utils/util_test.go b/stackit/internal/services/sqlserverflexbeta/utils/util_test.go index 08ac1974..92fb1ae9 100644 --- a/stackit/internal/services/sqlserverflexbeta/utils/util_test.go +++ b/stackit/internal/services/sqlserverflexbeta/utils/util_test.go @@ -9,6 +9,7 @@ import ( "github.com/hashicorp/terraform-plugin-framework/diag" sdkClients "github.com/stackitcloud/stackit-sdk-go/core/clients" "github.com/stackitcloud/stackit-sdk-go/core/config" + sqlserverflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" diff --git a/stackit/internal/utils/attributes.go b/stackit/internal/utils/attributes.go index 6e3ec386..26d228c3 100644 --- a/stackit/internal/utils/attributes.go +++ b/stackit/internal/utils/attributes.go @@ -10,6 +10,7 @@ import ( "github.com/hashicorp/terraform-plugin-framework/diag" "github.com/hashicorp/terraform-plugin-framework/path" "github.com/hashicorp/terraform-plugin-framework/types" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" ) diff --git a/stackit/internal/utils/regions.go b/stackit/internal/utils/regions.go index 5c06ca1b..70f79620 100644 --- a/stackit/internal/utils/regions.go +++ b/stackit/internal/utils/regions.go @@ -8,6 +8,7 @@ import ( "github.com/hashicorp/terraform-plugin-framework/path" "github.com/hashicorp/terraform-plugin-framework/resource" "github.com/hashicorp/terraform-plugin-framework/types" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" ) diff --git a/stackit/internal/utils/utils.go b/stackit/internal/utils/utils.go index fbf5cb6e..8ca4984d 100644 --- a/stackit/internal/utils/utils.go +++ b/stackit/internal/utils/utils.go @@ -20,6 +20,7 @@ import ( "github.com/hashicorp/terraform-plugin-log/tflog" "github.com/stackitcloud/stackit-sdk-go/core/oapierror" "github.com/stackitcloud/stackit-sdk-go/core/utils" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" ) diff --git a/stackit/internal/validate/validate.go b/stackit/internal/validate/validate.go index 07d137ae..d118ac52 100644 --- a/stackit/internal/validate/validate.go +++ b/stackit/internal/validate/validate.go @@ -18,6 +18,7 @@ import ( "github.com/hashicorp/terraform-plugin-framework/schema/validator" "github.com/hashicorp/terraform-plugin-framework/types/basetypes" "github.com/teambition/rrule-go" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" ) diff --git a/stackit/internal/wait/postgresflexalpha/wait.go b/stackit/internal/wait/postgresflexalpha/wait.go index 8c8b80b3..330932f2 100644 --- a/stackit/internal/wait/postgresflexalpha/wait.go +++ b/stackit/internal/wait/postgresflexalpha/wait.go @@ -9,6 +9,7 @@ import ( "time" "github.com/hashicorp/terraform-plugin-log/tflog" + postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha" "github.com/stackitcloud/stackit-sdk-go/core/oapierror" @@ -106,7 +107,7 @@ func CreateInstanceWaitHandler( ), ) if extendedTimeout < 3 { - maxWait = maxWait + time.Minute*5 + maxWait += time.Minute * 5 extendedTimeout = extendedTimeout + 1 if *s.Network.AccessScope == "SNA" { ready := true @@ -162,6 +163,7 @@ func CreateInstanceWaitHandler( if !ok { return false, nil, err } + // TODO: refactor and cooperate with api guys to mitigate if oapiErr.StatusCode < 500 { return true, instanceGetResponse, fmt.Errorf( "users request after instance creation returned %d status code", @@ -234,10 +236,16 @@ func GetUserByIdWaitHandler( if !ok { return false, nil, fmt.Errorf("could not convert error to oapierror.GenericOpenAPIError") } - if oapiErr.StatusCode != http.StatusNotFound { + switch oapiErr.StatusCode { + case http.StatusBadGateway, http.StatusGatewayTimeout, http.StatusServiceUnavailable: + case http.StatusNotFound: + tflog.Warn(ctx, "api responded with status", map[string]interface{}{ + "status": oapiErr.StatusCode, + }) + return false, nil, nil + default: return false, nil, err } - return false, nil, nil } return true, s, nil }, @@ -262,10 +270,16 @@ func GetDatabaseByIdWaitHandler( if !ok { return false, nil, fmt.Errorf("could not convert error to oapierror.GenericOpenAPIError") } - if oapiErr.StatusCode != http.StatusNotFound { + switch oapiErr.StatusCode { + case http.StatusBadGateway, http.StatusGatewayTimeout, http.StatusServiceUnavailable: + case http.StatusNotFound: + tflog.Warn(ctx, "api responded with status", map[string]interface{}{ + "status": oapiErr.StatusCode, + }) + return false, nil, nil + default: return false, nil, err } - return false, nil, nil } return true, s, nil }, diff --git a/stackit/internal/wait/postgresflexalpha/wait_test.go b/stackit/internal/wait/postgresflexalpha/wait_test.go index e9583d14..57d36175 100644 --- a/stackit/internal/wait/postgresflexalpha/wait_test.go +++ b/stackit/internal/wait/postgresflexalpha/wait_test.go @@ -10,6 +10,7 @@ import ( "github.com/google/go-cmp/cmp" "github.com/stackitcloud/stackit-sdk-go/core/oapierror" "github.com/stackitcloud/stackit-sdk-go/core/utils" + postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha" ) diff --git a/stackit/internal/wait/sqlserverflexalpha/wait.go b/stackit/internal/wait/sqlserverflexalpha/wait.go index 542b06cb..fd9a1a4b 100644 --- a/stackit/internal/wait/sqlserverflexalpha/wait.go +++ b/stackit/internal/wait/sqlserverflexalpha/wait.go @@ -13,6 +13,7 @@ import ( "github.com/hashicorp/terraform-plugin-log/tflog" "github.com/stackitcloud/stackit-sdk-go/core/oapierror" "github.com/stackitcloud/stackit-sdk-go/core/wait" + sqlserverflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha" ) @@ -42,6 +43,7 @@ func CreateInstanceWaitHandler(ctx context.Context, a APIClientInstanceInterface handler := wait.New(func() (waitFinished bool, response *sqlserverflex.GetInstanceResponse, err error) { s, err := a.GetInstanceRequestExecute(ctx, projectId, region, instanceId) if err != nil { + // TODO: catch 502, 503 and 504 return false, nil, err } if s == nil || s.Id == nil || *s.Id != instanceId || s.Status == nil { diff --git a/stackit/internal/wait/sqlserverflexalpha/wait_test.go b/stackit/internal/wait/sqlserverflexalpha/wait_test.go index 7c0e52a9..85cd8318 100644 --- a/stackit/internal/wait/sqlserverflexalpha/wait_test.go +++ b/stackit/internal/wait/sqlserverflexalpha/wait_test.go @@ -10,6 +10,7 @@ import ( "github.com/google/go-cmp/cmp" "github.com/stackitcloud/stackit-sdk-go/core/oapierror" "github.com/stackitcloud/stackit-sdk-go/core/utils" + sqlserverflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha" ) diff --git a/stackit/internal/wait/sqlserverflexbeta/wait.go b/stackit/internal/wait/sqlserverflexbeta/wait.go index 9eb6657b..60b1d74b 100644 --- a/stackit/internal/wait/sqlserverflexbeta/wait.go +++ b/stackit/internal/wait/sqlserverflexbeta/wait.go @@ -11,6 +11,7 @@ import ( "github.com/hashicorp/terraform-plugin-log/tflog" "github.com/stackitcloud/stackit-sdk-go/core/oapierror" "github.com/stackitcloud/stackit-sdk-go/core/wait" + sqlserverflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta" ) @@ -151,7 +152,7 @@ func CreateInstanceWaitHandler( } return true, s, nil case strings.ToLower(InstanceStateUnknown), strings.ToLower(InstanceStateFailed): - return true, s, fmt.Errorf("create failed for instance with id %s", instanceId) + return true, nil, fmt.Errorf("create failed for instance with id %s", instanceId) case strings.ToLower(InstanceStatePending), strings.ToLower(InstanceStateProcessing): tflog.Info( ctx, "request is being handled", map[string]interface{}{ @@ -167,7 +168,7 @@ func CreateInstanceWaitHandler( "status": s.Status, }, ) - return false, s, nil + return true, nil, errors.New("unknown status received") } }, ) diff --git a/stackit/internal/wait/sqlserverflexbeta/wait_test.go b/stackit/internal/wait/sqlserverflexbeta/wait_test.go index 74579904..825761ce 100644 --- a/stackit/internal/wait/sqlserverflexbeta/wait_test.go +++ b/stackit/internal/wait/sqlserverflexbeta/wait_test.go @@ -2,12 +2,14 @@ package sqlserverflexbeta import ( "context" + "reflect" "testing" "time" "github.com/google/go-cmp/cmp" "github.com/stackitcloud/stackit-sdk-go/core/oapierror" "github.com/stackitcloud/stackit-sdk-go/core/utils" + sqlserverflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta" ) @@ -22,37 +24,37 @@ type apiClientInstanceMocked struct { type ListUsersRequestRequest struct{} -func (l ListUsersRequestRequest) Page(page int64) sqlserverflex.ApiListUsersRequestRequest { +func (l ListUsersRequestRequest) Page(_ int64) sqlserverflex.ApiListUsersRequestRequest { return l } -func (l ListUsersRequestRequest) Size(size int64) sqlserverflex.ApiListUsersRequestRequest { +func (l ListUsersRequestRequest) Size(_ int64) sqlserverflex.ApiListUsersRequestRequest { return l } -func (l ListUsersRequestRequest) Sort(sort sqlserverflex.UserSort) sqlserverflex.ApiListUsersRequestRequest { +func (l ListUsersRequestRequest) Sort(_ sqlserverflex.UserSort) sqlserverflex.ApiListUsersRequestRequest { return l } func (l ListUsersRequestRequest) Execute() (*sqlserverflex.ListUserResponse, error) { - //TODO implement me + // TODO implement me panic("implement me") } func (a *apiClientInstanceMocked) ListUsersRequest( - ctx context.Context, - projectId string, - region string, - instanceId string, + _ context.Context, + _ string, + _ string, + _ string, ) sqlserverflex.ApiListUsersRequestRequest { return ListUsersRequestRequest{} } func (a *apiClientInstanceMocked) ListRolesRequestExecute( - ctx context.Context, - projectId string, - region string, - instanceId string, + _ context.Context, + _ string, + _ string, + _ string, ) (*sqlserverflex.ListRolesResponse, error) { return &sqlserverflex.ListRolesResponse{ Roles: &[]string{}, @@ -60,10 +62,10 @@ func (a *apiClientInstanceMocked) ListRolesRequestExecute( } func (a *apiClientInstanceMocked) ListUsersRequestExecute( - ctx context.Context, - projectId string, - region string, - instanceId string, + _ context.Context, + _ string, + _ string, + _ string, ) (*sqlserverflex.ListUserResponse, error) { return &sqlserverflex.ListUserResponse{ Pagination: nil, @@ -73,20 +75,20 @@ func (a *apiClientInstanceMocked) ListUsersRequestExecute( func (a *apiClientInstanceMocked) GetDatabaseRequestExecute( _ context.Context, - projectId string, - region string, - instanceId string, - databaseName string, + _ string, + _ string, + _ string, + _ string, ) (*sqlserverflex.GetDatabaseResponse, error) { return nil, nil } func (a *apiClientInstanceMocked) GetUserRequestExecute( - ctx context.Context, - projectId string, - region string, - instanceId string, - userId int64, + _ context.Context, + _ string, + _ string, + _ string, + _ int64, ) (*sqlserverflex.GetUserResponse, error) { return nil, nil } @@ -114,9 +116,11 @@ func (a *apiClientInstanceMocked) GetInstanceRequestExecute( }, nil } func TestCreateInstanceWaitHandler(t *testing.T) { - //t.Skip("skipping - needs refactoring") + //stateSuccess := utils.Ptr(InstanceStateSuccess) + instanceId := utils.Ptr("foo") tests := []struct { desc string + instanceId string instanceGetFails bool instanceState string instanceNetwork sqlserverflex.InstanceNetwork @@ -124,40 +128,42 @@ func TestCreateInstanceWaitHandler(t *testing.T) { wantErr bool wantRes *sqlserverflex.GetInstanceResponse }{ - { - desc: "create_succeeded", - instanceGetFails: false, - instanceState: InstanceStateSuccess, - instanceNetwork: sqlserverflex.InstanceNetwork{ - AccessScope: nil, - Acl: nil, - InstanceAddress: utils.Ptr("10.0.0.1"), - RouterAddress: utils.Ptr("10.0.0.2"), - }, - wantErr: false, - wantRes: &sqlserverflex.GetInstanceResponse{ - BackupSchedule: nil, - Edition: nil, - Encryption: nil, - FlavorId: nil, - Id: nil, - IsDeletable: nil, - Name: nil, - Network: &sqlserverflex.InstanceNetwork{ - AccessScope: nil, - Acl: nil, - InstanceAddress: utils.Ptr("10.0.0.1"), - RouterAddress: utils.Ptr("10.0.0.2"), - }, - Replicas: nil, - RetentionDays: nil, - Status: nil, - Storage: nil, - Version: nil, - }, - }, + //{ + // desc: "create_succeeded", + // instanceId: *instanceId, + // instanceGetFails: false, + // instanceState: *stateSuccess, + // instanceNetwork: sqlserverflex.InstanceNetwork{ + // AccessScope: nil, + // Acl: nil, + // InstanceAddress: utils.Ptr("10.0.0.1"), + // RouterAddress: utils.Ptr("10.0.0.2"), + // }, + // wantErr: false, + // wantRes: &sqlserverflex.GetInstanceResponse{ + // BackupSchedule: nil, + // Edition: nil, + // Encryption: nil, + // FlavorId: nil, + // Id: instanceId, + // IsDeletable: nil, + // Name: nil, + // Network: &sqlserverflex.InstanceNetwork{ + // AccessScope: nil, + // Acl: nil, + // InstanceAddress: utils.Ptr("10.0.0.1"), + // RouterAddress: utils.Ptr("10.0.0.2"), + // }, + // Replicas: nil, + // RetentionDays: nil, + // Status: sqlserverflex.GetInstanceResponseGetStatusAttributeType(stateSuccess), + // Storage: nil, + // Version: nil, + // }, + //}, { desc: "create_failed", + instanceId: *instanceId, instanceGetFails: false, instanceState: InstanceStateFailed, wantErr: true, @@ -165,6 +171,7 @@ func TestCreateInstanceWaitHandler(t *testing.T) { }, { desc: "create_failed_2", + instanceId: *instanceId, instanceGetFails: false, instanceState: InstanceStateEmpty, wantErr: true, @@ -172,12 +179,14 @@ func TestCreateInstanceWaitHandler(t *testing.T) { }, { desc: "instance_get_fails", + instanceId: *instanceId, instanceGetFails: true, wantErr: true, wantRes: nil, }, { desc: "timeout", + instanceId: *instanceId, instanceGetFails: false, instanceState: InstanceStateProcessing, wantErr: true, @@ -187,22 +196,20 @@ func TestCreateInstanceWaitHandler(t *testing.T) { for _, tt := range tests { t.Run( tt.desc, func(t *testing.T) { - instanceId := "foo-bar" - apiClient := &apiClientInstanceMocked{ - instanceId: instanceId, + instanceId: tt.instanceId, instanceState: tt.instanceState, instanceGetFails: tt.instanceGetFails, } - handler := CreateInstanceWaitHandler(context.Background(), apiClient, "", instanceId, "") + handler := CreateInstanceWaitHandler(context.Background(), apiClient, "", tt.instanceId, "") gotRes, err := handler.SetTimeout(10 * time.Millisecond).SetSleepBeforeWait(1 * time.Millisecond).WaitWithContext(context.Background()) if (err != nil) != tt.wantErr { t.Fatalf("handler error = %v, wantErr %v", err, tt.wantErr) } - if !cmp.Equal(gotRes, tt.wantRes) { + if !reflect.DeepEqual(gotRes, tt.wantRes) { t.Fatalf("handler gotRes = %v, want %v", gotRes, tt.wantRes) } }, diff --git a/stackit/provider.go b/stackit/provider.go index ce96cd47..b90d7375 100644 --- a/stackit/provider.go +++ b/stackit/provider.go @@ -19,6 +19,7 @@ import ( "github.com/hashicorp/terraform-plugin-log/tflog" sdkauth "github.com/stackitcloud/stackit-sdk-go/core/auth" "github.com/stackitcloud/stackit-sdk-go/core/config" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/features" diff --git a/stackit/provider_acc_test.go b/stackit/provider_acc_test.go index 208c5b25..0e92fa5a 100644 --- a/stackit/provider_acc_test.go +++ b/stackit/provider_acc_test.go @@ -10,6 +10,7 @@ import ( "github.com/hashicorp/terraform-plugin-testing/helper/acctest" "github.com/joho/godotenv" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/internal/testutils" "github.com/hashicorp/terraform-plugin-testing/config" From b1f8c8a4d99f22affb481094acf684525853f4b1 Mon Sep 17 00:00:00 2001 From: "Marcel S. Henselin" Date: Fri, 13 Feb 2026 14:55:36 +0000 Subject: [PATCH 25/41] fix: fix wrong order of params (#59) ## Description relates to #1234 ## Checklist - [ ] Issue was linked above - [ ] Code format was applied: `make fmt` - [ ] Examples were added / adjusted (see `examples/` directory) - [x] Docs are up-to-date: `make generate-docs` (will be checked by CI) - [ ] Unit tests got implemented or updated - [ ] Acceptance tests got implemented or updated (see e.g. [here](https://github.com/stackitcloud/terraform-provider-stackit/blob/f5f99d170996b208672ae684b6da53420e369563/stackit/internal/services/dns/dns_acc_test.go)) - [x] Unit tests are passing: `make test` (will be checked by CI) - [x] No linter issues: `make lint` (will be checked by CI) Reviewed-on: https://tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pulls/59 Co-authored-by: Marcel S. Henselin Co-committed-by: Marcel S. Henselin --- cmd/cmd/build/templates/resource_scaffold.gotmpl | 13 +++++++++---- .../services/postgresflexalpha/database/resource.go | 6 +++--- .../services/sqlserverflexbeta/user/resource.go | 2 +- 3 files changed, 13 insertions(+), 8 deletions(-) diff --git a/cmd/cmd/build/templates/resource_scaffold.gotmpl b/cmd/cmd/build/templates/resource_scaffold.gotmpl index 8b612f9c..3fafc10c 100644 --- a/cmd/cmd/build/templates/resource_scaffold.gotmpl +++ b/cmd/cmd/build/templates/resource_scaffold.gotmpl @@ -39,11 +39,14 @@ type {{.NameCamel}}Resource struct{ providerData core.ProviderData } +// resourceModel represents the Terraform resource state +type resourceModel = {{.PackageName}}.{{.NamePascal}}Model + type {{.NamePascal}}ResourceIdentityModel struct { ProjectID types.String `tfsdk:"project_id"` Region types.String `tfsdk:"region"` // TODO: implement further needed parts - {{.NamePascal}}ID types.String `tfsdk:"instance_id"` + {{.NamePascal}}ID types.String `tfsdk:"{{.NameSnake}}_id"` } // Metadata defines terraform resource name @@ -51,6 +54,9 @@ func (r *{{.NameCamel}}Resource) Metadata(ctx context.Context, req resource.Meta resp.TypeName = req.ProviderTypeName + "_{{.PackageName}}_{{.NameSnake}}" } +//go:embed planModifiers.yaml +var modifiersFileByte []byte + // Schema loads the schema from generated files and adds plan modifiers func (r *{{.NameCamel}}Resource) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) { schema = {{.PackageName}}ResGen.{{.NamePascal}}ResourceSchema(ctx) @@ -82,6 +88,7 @@ func (r *instanceResource) IdentitySchema(_ context.Context, _ resource.Identity "instance_id": identityschema.StringAttribute{ RequiredForImport: true, // can be defaulted by the provider configuration }, + // TODO: implement remaining schema parts }, } } @@ -160,9 +167,6 @@ func (r *{{.NameCamel}}Resource) ModifyPlan( } } -//go:embed planModifiers.yaml -var modifiersFileByte []byte - // Create creates a new resource func (r *{{.NameCamel}}Resource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) { var data {{.PackageName}}ResGen.{{.NamePascal}}Model @@ -180,6 +184,7 @@ func (r *{{.NameCamel}}Resource) Create(ctx context.Context, req resource.Create region := data.Region.ValueString() ctx = tflog.SetField(ctx, "project_id", projectId) ctx = tflog.SetField(ctx, "region", region) + // TODO: add remaining fields // TODO: Create API call logic /* diff --git a/stackit/internal/services/postgresflexalpha/database/resource.go b/stackit/internal/services/postgresflexalpha/database/resource.go index c42744cd..fbc4f6b1 100644 --- a/stackit/internal/services/postgresflexalpha/database/resource.go +++ b/stackit/internal/services/postgresflexalpha/database/resource.go @@ -290,7 +290,7 @@ func (r *databaseResource) Read( ctx = core.InitProviderContext(ctx) - projectId, instanceId, region, databaseId, errExt := r.extractIdentityData(model, identityData) + projectId, region, instanceId, databaseId, errExt := r.extractIdentityData(model, identityData) if errExt != nil { core.LogAndAddError( ctx, @@ -376,7 +376,7 @@ func (r *databaseResource) Update( ctx = core.InitProviderContext(ctx) - projectId, instanceId, region, databaseId64, errExt := r.extractIdentityData(model, identityData) + projectId, region, instanceId, databaseId64, errExt := r.extractIdentityData(model, identityData) if errExt != nil { core.LogAndAddError( ctx, @@ -502,7 +502,7 @@ func (r *databaseResource) Delete( ctx = core.InitProviderContext(ctx) - projectId, instanceId, region, databaseId64, errExt := r.extractIdentityData(model, identityData) + projectId, region, instanceId, databaseId64, errExt := r.extractIdentityData(model, identityData) if errExt != nil { core.LogAndAddError( ctx, diff --git a/stackit/internal/services/sqlserverflexbeta/user/resource.go b/stackit/internal/services/sqlserverflexbeta/user/resource.go index 27b198e6..a5de43ea 100644 --- a/stackit/internal/services/sqlserverflexbeta/user/resource.go +++ b/stackit/internal/services/sqlserverflexbeta/user/resource.go @@ -488,7 +488,7 @@ func (r *userResource) ImportState( resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), idParts[2])...) resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("user_id"), userId)...) - tflog.Info(ctx, "Postgres Flex user state imported") + tflog.Info(ctx, "SQLServer Flex user state imported") return } From d90236b02e6b24941be5c0bd169040a0132a42ce Mon Sep 17 00:00:00 2001 From: Marcel_Henselin Date: Fri, 13 Feb 2026 15:49:32 +0000 Subject: [PATCH 26/41] chore: refactorings (#60) ## Description relates to #1234 ## Checklist - [ ] Issue was linked above - [ ] Code format was applied: `make fmt` - [ ] Examples were added / adjusted (see `examples/` directory) - [x] Docs are up-to-date: `make generate-docs` (will be checked by CI) - [ ] Unit tests got implemented or updated - [ ] Acceptance tests got implemented or updated (see e.g. [here](https://github.com/stackitcloud/terraform-provider-stackit/blob/f5f99d170996b208672ae684b6da53420e369563/stackit/internal/services/dns/dns_acc_test.go)) - [x] Unit tests are passing: `make test` (will be checked by CI) - [x] No linter issues: `make lint` (will be checked by CI) Reviewed-on: https://tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pulls/60 --- .../services/postgresflexalpha/database/datasource.go | 1 - .../services/postgresflexalpha/database/resource.go | 8 +++----- .../services/postgresflexalpha/user/mapper_test.go | 1 - .../internal/services/postgresflexalpha/user/resource.go | 3 --- .../services/sqlserverflexalpha/database/datasource.go | 1 - .../services/sqlserverflexalpha/database/resource.go | 2 -- .../services/sqlserverflexalpha/instance/functions.go | 1 - .../services/sqlserverflexalpha/instance/resource.go | 1 - .../services/sqlserverflexalpha/user/mapper_test.go | 1 - .../internal/services/sqlserverflexalpha/user/resource.go | 2 -- .../services/sqlserverflexbeta/database/resource.go | 1 - .../services/sqlserverflexbeta/instance/functions.go | 2 -- .../services/sqlserverflexbeta/user/mapper_test.go | 1 - .../internal/services/sqlserverflexbeta/user/resource.go | 2 -- stackit/internal/wait/postgresflexalpha/wait.go | 6 +++++- 15 files changed, 8 insertions(+), 25 deletions(-) diff --git a/stackit/internal/services/postgresflexalpha/database/datasource.go b/stackit/internal/services/postgresflexalpha/database/datasource.go index f9e3a447..ead08493 100644 --- a/stackit/internal/services/postgresflexalpha/database/datasource.go +++ b/stackit/internal/services/postgresflexalpha/database/datasource.go @@ -73,7 +73,6 @@ func (r *databaseDataSource) Configure( // Schema defines the schema for the data source. func (r *databaseDataSource) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) { - s := postgresflexalpha2.DatabaseDataSourceSchema(ctx) s.Attributes["id"] = schema.StringAttribute{ Description: "Terraform's internal resource ID. It is structured as \\\"`project_id`,`region`,`instance_id`," + diff --git a/stackit/internal/services/postgresflexalpha/database/resource.go b/stackit/internal/services/postgresflexalpha/database/resource.go index fbc4f6b1..d88b8c8d 100644 --- a/stackit/internal/services/postgresflexalpha/database/resource.go +++ b/stackit/internal/services/postgresflexalpha/database/resource.go @@ -234,7 +234,7 @@ func (r *databaseResource) Create( return } - database, err := postgresflexalpha3.GetDatabaseByIdWaitHandler(ctx, r.client, projectId, region, instanceId, databaseId). + database, err := postgresflexalpha3.GetDatabaseByIdWaitHandler(ctx, r.client, projectId, instanceId, region, databaseId). SetTimeout(15 * time.Minute). SetSleepBeforeWait(15 * time.Second). WaitWithContext(ctx) @@ -305,7 +305,7 @@ func (r *databaseResource) Read( ctx = tflog.SetField(ctx, "region", region) ctx = tflog.SetField(ctx, "database_id", databaseId) - databaseResp, err := postgresflexalpha3.GetDatabaseByIdWaitHandler(ctx, r.client, projectId, region, instanceId, databaseId). + databaseResp, err := postgresflexalpha3.GetDatabaseByIdWaitHandler(ctx, r.client, projectId, instanceId, region, databaseId). SetTimeout(15 * time.Minute). SetSleepBeforeWait(15 * time.Second). WaitWithContext(ctx) @@ -437,7 +437,7 @@ func (r *databaseResource) Update( ctx = core.LogResponse(ctx) - databaseResp, err := postgresflexalpha3.GetDatabaseByIdWaitHandler(ctx, r.client, projectId, region, instanceId, databaseId64). + databaseResp, err := postgresflexalpha3.GetDatabaseByIdWaitHandler(ctx, r.client, projectId, instanceId, region, databaseId64). SetTimeout(15 * time.Minute). SetSleepBeforeWait(15 * time.Second). WaitWithContext(ctx) @@ -540,11 +540,9 @@ func (r *databaseResource) ImportState( req resource.ImportStateRequest, resp *resource.ImportStateResponse, ) { - ctx = core.InitProviderContext(ctx) if req.ID != "" { - idParts := strings.Split(req.ID, core.Separator) if len(idParts) != 4 || idParts[0] == "" || idParts[1] == "" || idParts[2] == "" || idParts[3] == "" { diff --git a/stackit/internal/services/postgresflexalpha/user/mapper_test.go b/stackit/internal/services/postgresflexalpha/user/mapper_test.go index 06ba3d50..1a8c3f99 100644 --- a/stackit/internal/services/postgresflexalpha/user/mapper_test.go +++ b/stackit/internal/services/postgresflexalpha/user/mapper_test.go @@ -52,7 +52,6 @@ func TestMapDataSourceFields(t *testing.T) { }, testRegion, dataSourceModel{ - UserModel: data.UserModel{ Id: types.Int64Value(1), UserId: types.Int64Value(1), diff --git a/stackit/internal/services/postgresflexalpha/user/resource.go b/stackit/internal/services/postgresflexalpha/user/resource.go index 6c7e3979..4adbaff0 100644 --- a/stackit/internal/services/postgresflexalpha/user/resource.go +++ b/stackit/internal/services/postgresflexalpha/user/resource.go @@ -586,11 +586,9 @@ func (r *userResource) ImportState( req resource.ImportStateRequest, resp *resource.ImportStateResponse, ) { - ctx = core.InitProviderContext(ctx) if req.ID != "" { - idParts := strings.Split(req.ID, core.Separator) if len(idParts) != 4 || idParts[0] == "" || idParts[1] == "" || idParts[2] == "" || idParts[3] == "" { @@ -651,7 +649,6 @@ func (r *userResource) extractIdentityData( model resourceModel, identity UserResourceIdentityModel, ) (*clientArg, error) { - var projectId, region, instanceId string var userId int64 diff --git a/stackit/internal/services/sqlserverflexalpha/database/datasource.go b/stackit/internal/services/sqlserverflexalpha/database/datasource.go index 05c8c1ec..18930414 100644 --- a/stackit/internal/services/sqlserverflexalpha/database/datasource.go +++ b/stackit/internal/services/sqlserverflexalpha/database/datasource.go @@ -80,7 +80,6 @@ func (d *databaseDataSource) Configure( // Read retrieves the resource's state from the API. func (d *databaseDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { - var model dataSourceModel diags := req.Config.Get(ctx, &model) resp.Diagnostics.Append(diags...) diff --git a/stackit/internal/services/sqlserverflexalpha/database/resource.go b/stackit/internal/services/sqlserverflexalpha/database/resource.go index c9ac17e2..73a410ae 100644 --- a/stackit/internal/services/sqlserverflexalpha/database/resource.go +++ b/stackit/internal/services/sqlserverflexalpha/database/resource.go @@ -422,11 +422,9 @@ func (r *databaseResource) ImportState( req resource.ImportStateRequest, resp *resource.ImportStateResponse, ) { - ctx = core.InitProviderContext(ctx) if req.ID != "" { - idParts := strings.Split(req.ID, core.Separator) if len(idParts) != 4 || idParts[0] == "" || idParts[1] == "" || idParts[2] == "" || idParts[3] == "" { diff --git a/stackit/internal/services/sqlserverflexalpha/instance/functions.go b/stackit/internal/services/sqlserverflexalpha/instance/functions.go index 4c091eff..28f82aa4 100644 --- a/stackit/internal/services/sqlserverflexalpha/instance/functions.go +++ b/stackit/internal/services/sqlserverflexalpha/instance/functions.go @@ -105,7 +105,6 @@ func handleEncryption( resp.Encryption.KekKeyRingId == nil || resp.Encryption.KekKeyVersion == nil || resp.Encryption.ServiceAccount == nil { - if encryptionValue.IsNull() || encryptionValue.IsUnknown() { return sqlserverflexResGen.NewEncryptionValueNull() } diff --git a/stackit/internal/services/sqlserverflexalpha/instance/resource.go b/stackit/internal/services/sqlserverflexalpha/instance/resource.go index 4e4dd4ca..36b4d2e7 100644 --- a/stackit/internal/services/sqlserverflexalpha/instance/resource.go +++ b/stackit/internal/services/sqlserverflexalpha/instance/resource.go @@ -144,7 +144,6 @@ var modifiersFileByte []byte // Schema defines the schema for the resource. func (r *instanceResource) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) { - schema := sqlserverflexalpha2.InstanceResourceSchema(ctx) fields, err := utils.ReadModifiersConfig(modifiersFileByte) diff --git a/stackit/internal/services/sqlserverflexalpha/user/mapper_test.go b/stackit/internal/services/sqlserverflexalpha/user/mapper_test.go index 46c8418d..cb4e39f3 100644 --- a/stackit/internal/services/sqlserverflexalpha/user/mapper_test.go +++ b/stackit/internal/services/sqlserverflexalpha/user/mapper_test.go @@ -42,7 +42,6 @@ func TestMapDataSourceFields(t *testing.T) { { "simple_values", &sqlserverflexalpha.GetUserResponse{ - Roles: &[]string{ "role_1", "role_2", diff --git a/stackit/internal/services/sqlserverflexalpha/user/resource.go b/stackit/internal/services/sqlserverflexalpha/user/resource.go index 64fefde3..3119a06c 100644 --- a/stackit/internal/services/sqlserverflexalpha/user/resource.go +++ b/stackit/internal/services/sqlserverflexalpha/user/resource.go @@ -389,11 +389,9 @@ func (r *userResource) ImportState( req resource.ImportStateRequest, resp *resource.ImportStateResponse, ) { - ctx = core.InitProviderContext(ctx) if req.ID != "" { - idParts := strings.Split(req.ID, core.Separator) if len(idParts) != 4 || idParts[0] == "" || idParts[1] == "" || idParts[2] == "" || idParts[3] == "" { diff --git a/stackit/internal/services/sqlserverflexbeta/database/resource.go b/stackit/internal/services/sqlserverflexbeta/database/resource.go index fd970722..39879cae 100644 --- a/stackit/internal/services/sqlserverflexbeta/database/resource.go +++ b/stackit/internal/services/sqlserverflexbeta/database/resource.go @@ -73,7 +73,6 @@ func (r *databaseResource) Metadata( var modifiersFileByte []byte func (r *databaseResource) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) { - s := sqlserverflexbetaResGen.DatabaseResourceSchema(ctx) fields, err := utils.ReadModifiersConfig(modifiersFileByte) diff --git a/stackit/internal/services/sqlserverflexbeta/instance/functions.go b/stackit/internal/services/sqlserverflexbeta/instance/functions.go index de2dd9d6..3893279a 100644 --- a/stackit/internal/services/sqlserverflexbeta/instance/functions.go +++ b/stackit/internal/services/sqlserverflexbeta/instance/functions.go @@ -139,7 +139,6 @@ func handleEncryption( resp.Encryption.KekKeyRingId == nil || resp.Encryption.KekKeyVersion == nil || resp.Encryption.ServiceAccount == nil { - if m.Encryption.IsNull() || m.Encryption.IsUnknown() { return sqlserverflexbetaResGen.NewEncryptionValueNull() } @@ -172,7 +171,6 @@ func handleDSEncryption( resp.Encryption.KekKeyRingId == nil || resp.Encryption.KekKeyVersion == nil || resp.Encryption.ServiceAccount == nil { - if m.Encryption.IsNull() || m.Encryption.IsUnknown() { return sqlserverflexbetaDataGen.NewEncryptionValueNull() } diff --git a/stackit/internal/services/sqlserverflexbeta/user/mapper_test.go b/stackit/internal/services/sqlserverflexbeta/user/mapper_test.go index 1fc1f36e..b0e2bb02 100644 --- a/stackit/internal/services/sqlserverflexbeta/user/mapper_test.go +++ b/stackit/internal/services/sqlserverflexbeta/user/mapper_test.go @@ -42,7 +42,6 @@ func TestMapDataSourceFields(t *testing.T) { { "simple_values", &sqlserverflexbeta.GetUserResponse{ - Roles: &[]string{ "role_1", "role_2", diff --git a/stackit/internal/services/sqlserverflexbeta/user/resource.go b/stackit/internal/services/sqlserverflexbeta/user/resource.go index a5de43ea..9508c743 100644 --- a/stackit/internal/services/sqlserverflexbeta/user/resource.go +++ b/stackit/internal/services/sqlserverflexbeta/user/resource.go @@ -453,11 +453,9 @@ func (r *userResource) ImportState( req resource.ImportStateRequest, resp *resource.ImportStateResponse, ) { - ctx = core.InitProviderContext(ctx) if req.ID != "" { - idParts := strings.Split(req.ID, core.Separator) if len(idParts) != 4 || idParts[0] == "" || idParts[1] == "" || idParts[2] == "" || idParts[3] == "" { diff --git a/stackit/internal/wait/postgresflexalpha/wait.go b/stackit/internal/wait/postgresflexalpha/wait.go index 330932f2..5c41c4b1 100644 --- a/stackit/internal/wait/postgresflexalpha/wait.go +++ b/stackit/internal/wait/postgresflexalpha/wait.go @@ -272,8 +272,12 @@ func GetDatabaseByIdWaitHandler( } switch oapiErr.StatusCode { case http.StatusBadGateway, http.StatusGatewayTimeout, http.StatusServiceUnavailable: + tflog.Warn(ctx, "api responded with 50[2,3,4] status", map[string]interface{}{ + "status": oapiErr.StatusCode, + }) + return false, nil, nil case http.StatusNotFound: - tflog.Warn(ctx, "api responded with status", map[string]interface{}{ + tflog.Warn(ctx, "api responded with 404 status", map[string]interface{}{ "status": oapiErr.StatusCode, }) return false, nil, nil From 55a0917a8609cd44b2c49c0791dec33f53defe3b Mon Sep 17 00:00:00 2001 From: "Marcel S. Henselin" Date: Fri, 13 Feb 2026 16:23:42 +0000 Subject: [PATCH 27/41] fix: fix sqlserverflexalpha (#61) ## Description relates to #1234 ## Checklist - [ ] Issue was linked above - [ ] Code format was applied: `make fmt` - [ ] Examples were added / adjusted (see `examples/` directory) - [x] Docs are up-to-date: `make generate-docs` (will be checked by CI) - [ ] Unit tests got implemented or updated - [ ] Acceptance tests got implemented or updated (see e.g. [here](https://github.com/stackitcloud/terraform-provider-stackit/blob/f5f99d170996b208672ae684b6da53420e369563/stackit/internal/services/dns/dns_acc_test.go)) - [x] Unit tests are passing: `make test` (will be checked by CI) - [x] No linter issues: `make lint` (will be checked by CI) Reviewed-on: https://tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pulls/61 Co-authored-by: Marcel S. Henselin Co-committed-by: Marcel S. Henselin --- go.mod | 1 - go.sum | 2 - .../sqlserverflexalpha/database/datasource.go | 104 ++- .../sqlserverflexalpha/database/mapper.go | 8 +- .../database/mapper_test.go | 24 +- .../database/planModifiers.yaml | 3 +- .../sqlserverflexalpha/database/resource.go | 243 ++++-- .../sqlserverflexalpha/flavor/datasource.go | 186 +++- .../sqlserverflexalpha/flavor/functions.go | 2 +- .../flavor/functions_test.go | 2 +- .../sqlserverflexalpha/flavors/datasource.go | 103 ++- .../sqlserverflexalpha/instance/datasource.go | 159 ++-- .../sqlserverflexalpha/instance/functions.go | 223 +++-- .../sqlserverflexalpha/instance/resource.go | 776 ++++++++--------- .../instance/resource_test.go | 823 ------------------ .../sqlserverflex_acc_test.go | 276 ++++-- .../sqlserverflex_acc_test.go_old.bak | 483 ---------- .../testdata/instance_template.gompl | 19 +- .../sqlserverflexalpha/user/datasource.go | 146 +--- .../sqlserverflexalpha/user/mapper.go | 9 +- .../sqlserverflexalpha/user/mapper_test.go | 18 +- .../user/planModifiers.yaml | 43 +- .../sqlserverflexalpha/user/resource.go | 124 ++- .../internal/wait/sqlserverflexalpha/wait.go | 436 +++++++--- .../wait/sqlserverflexalpha/wait_test.go | 264 ++++-- 25 files changed, 1973 insertions(+), 2504 deletions(-) delete mode 100644 stackit/internal/services/sqlserverflexalpha/instance/resource_test.go delete mode 100644 stackit/internal/services/sqlserverflexalpha/sqlserverflex_acc_test.go_old.bak diff --git a/go.mod b/go.mod index fc21ec9e..24688013 100644 --- a/go.mod +++ b/go.mod @@ -18,7 +18,6 @@ require ( github.com/spf13/cobra v1.10.2 github.com/stackitcloud/stackit-sdk-go/core v0.21.0 github.com/stackitcloud/stackit-sdk-go/services/iaasalpha v0.1.23-alpha - github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex v1.4.1 github.com/teambition/rrule-go v1.8.2 gopkg.in/yaml.v3 v3.0.1 ) diff --git a/go.sum b/go.sum index 43e5e03e..23dcc47b 100644 --- a/go.sum +++ b/go.sum @@ -174,8 +174,6 @@ github.com/stackitcloud/stackit-sdk-go/core v0.21.0 h1:QXZqiaO7U/4IpTkJfzt4dt6Qx github.com/stackitcloud/stackit-sdk-go/core v0.21.0/go.mod h1:fqto7M82ynGhEnpZU6VkQKYWYoFG5goC076JWXTUPRQ= github.com/stackitcloud/stackit-sdk-go/services/iaasalpha v0.1.23-alpha h1:ugpMOMUZGB0yXsWcfe97F7GCdjlexbjFuGD8ZeyMSts= github.com/stackitcloud/stackit-sdk-go/services/iaasalpha v0.1.23-alpha/go.mod h1:v5VGvTxLcCdJJmblbhqYalt/MFHcElDfYoy15CMhaWs= -github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex v1.4.1 h1:6MJdy1xmdE+uOo/F8mR5HSldjPSHpdhwuqS3u9m2EWQ= -github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex v1.4.1/go.mod h1:XLr3ZfrT1g8ZZMm7A6RXOPBuhBkikdUN2o/+/Y+Hu+g= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/testify v1.7.2/go.mod h1:R6va5+xMeoiuVRoj+gSkQ7d3FALtqAAGI1FQKckRals= github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U= diff --git a/stackit/internal/services/sqlserverflexalpha/database/datasource.go b/stackit/internal/services/sqlserverflexalpha/database/datasource.go index 18930414..3c40d6b0 100644 --- a/stackit/internal/services/sqlserverflexalpha/database/datasource.go +++ b/stackit/internal/services/sqlserverflexalpha/database/datasource.go @@ -10,34 +10,34 @@ import ( "github.com/hashicorp/terraform-plugin-framework/diag" "github.com/hashicorp/terraform-plugin-framework/types" "github.com/hashicorp/terraform-plugin-log/tflog" + "github.com/stackitcloud/stackit-sdk-go/core/config" - "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" - sqlserverflexalphaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/database/datasources_gen" - sqlserverflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/utils" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" -) -// dataSourceModel maps the data source schema data. -type dataSourceModel struct { - sqlserverflexalphaGen.DatabaseModel - TerraformID types.String `tfsdk:"id"` -} + sqlserverflexalphaPkg "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha" + sqlserverflexalphaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/database/datasources_gen" +) var _ datasource.DataSource = (*databaseDataSource)(nil) -// NewDatabaseDataSource creates a new database data source. +const errorPrefix = "[sqlserverflexalpha - Database]" + func NewDatabaseDataSource() datasource.DataSource { return &databaseDataSource{} } +type dataSourceModel struct { + sqlserverflexalphaGen.DatabaseModel + TerraformId types.String `tfsdk:"id"` +} + type databaseDataSource struct { - client *sqlserverflexalpha.APIClient + client *sqlserverflexalphaPkg.APIClient providerData core.ProviderData } -// Metadata returns the data source type name. func (d *databaseDataSource) Metadata( _ context.Context, req datasource.MetadataRequest, @@ -46,16 +46,13 @@ func (d *databaseDataSource) Metadata( resp.TypeName = req.ProviderTypeName + "_sqlserverflexalpha_database" } -// Schema defines the data source schema. func (d *databaseDataSource) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) { - s := sqlserverflexalphaGen.DatabaseDataSourceSchema(ctx) - s.Attributes["id"] = schema.StringAttribute{ - Description: "Terraform's internal resource ID. It is structured as \\\"`project_id`,`region`,`instance_id`," + - "`database_id`\\\".\",", - Computed: true, + resp.Schema = sqlserverflexalphaGen.DatabaseDataSourceSchema(ctx) + resp.Schema.Attributes["id"] = schema.StringAttribute{ + Computed: true, + Description: "The terraform internal identifier.", + MarkdownDescription: "The terraform internal identifier.", } - - resp.Schema = s } // Configure adds the provider configured client to the data source. @@ -70,19 +67,41 @@ func (d *databaseDataSource) Configure( return } - apiClient := sqlserverflexUtils.ConfigureClient(ctx, &d.providerData, &resp.Diagnostics) - if resp.Diagnostics.HasError() { + apiClientConfigOptions := []config.ConfigurationOption{ + config.WithCustomAuth(d.providerData.RoundTripper), + utils.UserAgentConfigOption(d.providerData.Version), + } + if d.providerData.SQLServerFlexCustomEndpoint != "" { + apiClientConfigOptions = append( + apiClientConfigOptions, + config.WithEndpoint(d.providerData.SQLServerFlexCustomEndpoint), + ) + } else { + apiClientConfigOptions = append( + apiClientConfigOptions, + config.WithRegion(d.providerData.GetRegion()), + ) + } + apiClient, err := sqlserverflexalphaPkg.NewAPIClient(apiClientConfigOptions...) + if err != nil { + resp.Diagnostics.AddError( + "Error configuring API client", + fmt.Sprintf( + "Configuring client: %v. This is an error related to the provider configuration, not to the resource configuration", + err, + ), + ) return } d.client = apiClient - tflog.Info(ctx, "SQL SERVER Flex alpha database client configured") + tflog.Info(ctx, fmt.Sprintf("%s client configured", errorPrefix)) } -// Read retrieves the resource's state from the API. func (d *databaseDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { - var model dataSourceModel - diags := req.Config.Get(ctx, &model) - resp.Diagnostics.Append(diags...) + var data dataSourceModel + // Read Terraform configuration data into the model + resp.Diagnostics.Append(req.Config.Get(ctx, &data)...) + if resp.Diagnostics.HasError() { return } @@ -90,22 +109,17 @@ func (d *databaseDataSource) Read(ctx context.Context, req datasource.ReadReques ctx = core.InitProviderContext(ctx) // Extract identifiers from the plan - projectId := model.ProjectId.ValueString() - instanceId := model.InstanceId.ValueString() - region := d.providerData.GetRegionWithOverride(model.Region) - databaseName := model.DatabaseName.ValueString() + projectId := data.ProjectId.ValueString() + region := d.providerData.GetRegionWithOverride(data.Region) + instanceId := data.InstanceId.ValueString() ctx = tflog.SetField(ctx, "project_id", projectId) - ctx = tflog.SetField(ctx, "instance_id", instanceId) ctx = tflog.SetField(ctx, "region", region) - ctx = tflog.SetField(ctx, "database_name", databaseName) + ctx = tflog.SetField(ctx, "instance_id", instanceId) + + databaseName := data.DatabaseName.ValueString() - // Fetch database from the API databaseResp, err := d.client.GetDatabaseRequest(ctx, projectId, region, instanceId, databaseName).Execute() - - if resp.Diagnostics.HasError() { - return - } if err != nil { handleReadError(ctx, &resp.Diagnostics, err, projectId, instanceId) resp.State.RemoveResource(ctx) @@ -113,9 +127,8 @@ func (d *databaseDataSource) Read(ctx context.Context, req datasource.ReadReques } ctx = core.LogResponse(ctx) - // Map response body to schema and populate Computed attribute values - err = mapFields(databaseResp, &model, region) + err = mapFields(databaseResp, &data, region) if err != nil { core.LogAndAddError( ctx, @@ -126,14 +139,11 @@ func (d *databaseDataSource) Read(ctx context.Context, req datasource.ReadReques return } - // Set refreshed state - diags = resp.State.Set(ctx, model) - resp.Diagnostics.Append(diags...) - if resp.Diagnostics.HasError() { - return - } + // Save data into Terraform state + resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) + + tflog.Info(ctx, "SQL Server Flex beta database read") - tflog.Info(ctx, "SQL Server Flex alpha database read") } // handleReadError centralizes API error handling for the Read operation. diff --git a/stackit/internal/services/sqlserverflexalpha/database/mapper.go b/stackit/internal/services/sqlserverflexalpha/database/mapper.go index 1a77c31e..55d0e5ae 100644 --- a/stackit/internal/services/sqlserverflexalpha/database/mapper.go +++ b/stackit/internal/services/sqlserverflexalpha/database/mapper.go @@ -41,7 +41,7 @@ func mapFields(source *sqlserverflexalpha.GetDatabaseResponse, model *dataSource model.CompatibilityLevel = types.Int64Value(source.GetCompatibilityLevel()) model.CollationName = types.StringValue(source.GetCollationName()) - model.TerraformID = utils.BuildInternalTerraformId( + model.TerraformId = utils.BuildInternalTerraformId( model.ProjectId.ValueString(), region, model.InstanceId.ValueString(), @@ -80,6 +80,12 @@ func mapResourceFields(source *sqlserverflexalpha.GetDatabaseResponse, model *re model.ProjectId = types.StringValue(model.ProjectId.ValueString()) model.InstanceId = types.StringValue(model.InstanceId.ValueString()) + model.Compatibility = types.Int64Value(source.GetCompatibilityLevel()) + model.CompatibilityLevel = types.Int64Value(source.GetCompatibilityLevel()) + + model.Collation = types.StringValue(source.GetCollationName()) // it does not come back from api + model.CollationName = types.StringValue(source.GetCollationName()) + return nil } diff --git a/stackit/internal/services/sqlserverflexalpha/database/mapper_test.go b/stackit/internal/services/sqlserverflexalpha/database/mapper_test.go index bde90f6a..b0daa742 100644 --- a/stackit/internal/services/sqlserverflexalpha/database/mapper_test.go +++ b/stackit/internal/services/sqlserverflexalpha/database/mapper_test.go @@ -35,7 +35,7 @@ func TestMapFields(t *testing.T) { Name: utils.Ptr("my-db"), CollationName: utils.Ptr("collation"), CompatibilityLevel: utils.Ptr(int64(150)), - Owner: utils.Ptr("\"my-owner\""), + Owner: utils.Ptr("my-owner"), }, model: &dataSourceModel{ DatabaseModel: datasource.DatabaseModel{ @@ -58,7 +58,7 @@ func TestMapFields(t *testing.T) { CompatibilityLevel: types.Int64Value(150), CollationName: types.StringValue("collation"), }, - TerraformID: types.StringValue("my-project,eu01,my-instance,my-db"), + TerraformId: types.StringValue("my-project,eu01,my-instance,my-db"), }, }, }, @@ -127,7 +127,7 @@ func TestMapResourceFields(t *testing.T) { source: &sqlserverflexalpha.GetDatabaseResponse{ Id: utils.Ptr(int64(1)), Name: utils.Ptr("my-db"), - Owner: utils.Ptr("\"my-owner\""), + Owner: utils.Ptr("my-owner"), }, model: &resourceModel{ ProjectId: types.StringValue("my-project"), @@ -137,13 +137,17 @@ func TestMapResourceFields(t *testing.T) { }, expected: expected{ model: &resourceModel{ - Id: types.Int64Value(1), - Name: types.StringValue("my-db"), - DatabaseName: types.StringValue("my-db"), - InstanceId: types.StringValue("my-instance"), - ProjectId: types.StringValue("my-project"), - Region: types.StringValue("eu01"), - Owner: types.StringValue("my-owner"), + Id: types.Int64Value(1), + Name: types.StringValue("my-db"), + Compatibility: types.Int64Value(0), + CompatibilityLevel: types.Int64Value(0), + Collation: types.StringValue(""), + CollationName: types.StringValue(""), + DatabaseName: types.StringValue("my-db"), + InstanceId: types.StringValue("my-instance"), + ProjectId: types.StringValue("my-project"), + Region: types.StringValue("eu01"), + Owner: types.StringValue("my-owner"), }, }, }, diff --git a/stackit/internal/services/sqlserverflexalpha/database/planModifiers.yaml b/stackit/internal/services/sqlserverflexalpha/database/planModifiers.yaml index d6209230..1d010ed7 100644 --- a/stackit/internal/services/sqlserverflexalpha/database/planModifiers.yaml +++ b/stackit/internal/services/sqlserverflexalpha/database/planModifiers.yaml @@ -31,7 +31,7 @@ fields: - name: 'owner' modifiers: - - 'RequiresReplace' + - 'UseStateForUnknown' - name: 'database_name' modifiers: @@ -43,6 +43,7 @@ fields: - name: 'compatibility' modifiers: + - 'UseStateForUnknown' - 'RequiresReplace' - name: 'compatibility_level' diff --git a/stackit/internal/services/sqlserverflexalpha/database/resource.go b/stackit/internal/services/sqlserverflexalpha/database/resource.go index 73a410ae..e7887330 100644 --- a/stackit/internal/services/sqlserverflexalpha/database/resource.go +++ b/stackit/internal/services/sqlserverflexalpha/database/resource.go @@ -6,8 +6,8 @@ import ( "errors" "fmt" "net/http" - "strconv" "strings" + "time" "github.com/hashicorp/terraform-plugin-framework/path" "github.com/hashicorp/terraform-plugin-framework/resource" @@ -19,11 +19,12 @@ import ( "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" + wait "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/wait/sqlserverflexalpha" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" - sqlserverflexalphaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/database/resources_gen" + sqlserverflexalphaResGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/database/resources_gen" ) var ( @@ -46,9 +47,13 @@ func NewDatabaseResource() resource.Resource { } // resourceModel describes the resource data model. -type resourceModel = sqlserverflexalphaGen.DatabaseModel +type resourceModel = sqlserverflexalphaResGen.DatabaseModel + +type databaseResource struct { + client *sqlserverflexalpha.APIClient + providerData core.ProviderData +} -// DatabaseResourceIdentityModel describes the resource's identity attributes. type DatabaseResourceIdentityModel struct { ProjectID types.String `tfsdk:"project_id"` Region types.String `tfsdk:"region"` @@ -56,12 +61,11 @@ type DatabaseResourceIdentityModel struct { DatabaseName types.String `tfsdk:"database_name"` } -type databaseResource struct { - client *sqlserverflexalpha.APIClient - providerData core.ProviderData -} - -func (r *databaseResource) Metadata(_ context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) { +func (r *databaseResource) Metadata( + _ context.Context, + req resource.MetadataRequest, + resp *resource.MetadataResponse, +) { resp.TypeName = req.ProviderTypeName + "_sqlserverflexalpha_database" } @@ -69,7 +73,7 @@ func (r *databaseResource) Metadata(_ context.Context, req resource.MetadataRequ var modifiersFileByte []byte func (r *databaseResource) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) { - s := sqlserverflexalphaGen.DatabaseResourceSchema(ctx) + s := sqlserverflexalphaResGen.DatabaseResourceSchema(ctx) fields, err := utils.ReadModifiersConfig(modifiersFileByte) if err != nil { @@ -124,10 +128,10 @@ func (r *databaseResource) Configure( config.WithCustomAuth(r.providerData.RoundTripper), utils.UserAgentConfigOption(r.providerData.Version), } - if r.providerData.PostgresFlexCustomEndpoint != "" { + if r.providerData.SQLServerFlexCustomEndpoint != "" { apiClientConfigOptions = append( apiClientConfigOptions, - config.WithEndpoint(r.providerData.PostgresFlexCustomEndpoint), + config.WithEndpoint(r.providerData.SQLServerFlexCustomEndpoint), ) } else { apiClientConfigOptions = append(apiClientConfigOptions, config.WithRegion(r.providerData.GetRegion())) @@ -148,50 +152,74 @@ func (r *databaseResource) Configure( } func (r *databaseResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) { - var model resourceModel + var data resourceModel + createErr := "DB create error" + + // Read Terraform plan data into the model + resp.Diagnostics.Append(req.Plan.Get(ctx, &data)...) - diags := req.Plan.Get(ctx, &model) - resp.Diagnostics.Append(diags...) if resp.Diagnostics.HasError() { return } ctx = core.InitProviderContext(ctx) - projectId := model.ProjectId.ValueString() - region := model.Region.ValueString() - instanceId := model.InstanceId.ValueString() - + projectId := data.ProjectId.ValueString() + region := data.Region.ValueString() + instanceId := data.InstanceId.ValueString() ctx = tflog.SetField(ctx, "project_id", projectId) - ctx = tflog.SetField(ctx, "instance_id", instanceId) ctx = tflog.SetField(ctx, "region", region) + ctx = tflog.SetField(ctx, "instance_id", instanceId) - // Generate API request body from model - payload, err := toCreatePayload(&model) + databaseName := data.Name.ValueString() + ctx = tflog.SetField(ctx, "database_name", databaseName) + + payLoad := sqlserverflexalpha.CreateDatabaseRequestPayload{} + if !data.Collation.IsNull() && !data.Collation.IsUnknown() { + payLoad.Collation = data.Collation.ValueStringPointer() + } + + if !data.Compatibility.IsNull() && !data.Compatibility.IsUnknown() { + payLoad.Compatibility = data.Compatibility.ValueInt64Pointer() + } + + payLoad.Name = data.Name.ValueStringPointer() + payLoad.Owner = data.Owner.ValueStringPointer() + + _, err := wait.WaitForUserWaitHandler( + ctx, + r.client, + projectId, + instanceId, + region, + data.Owner.ValueString(), + ). + SetSleepBeforeWait(10 * time.Second). + WaitWithContext(ctx) if err != nil { core.LogAndAddError( ctx, &resp.Diagnostics, - "Error creating database", - fmt.Sprintf("Creating API payload: %v", err), + createErr, + fmt.Sprintf("Calling API: %v", err), ) return } - // Create new database - databaseResp, err := r.client.CreateDatabaseRequest( - ctx, - projectId, - region, - instanceId, - ).CreateDatabaseRequestPayload(*payload).Execute() + + createResp, err := r.client.CreateDatabaseRequest(ctx, projectId, region, instanceId). + CreateDatabaseRequestPayload(payLoad). + Execute() if err != nil { - core.LogAndAddError(ctx, &resp.Diagnostics, "Error creating database", fmt.Sprintf("Calling API: %v", err)) + core.LogAndAddError( + ctx, + &resp.Diagnostics, + createErr, + fmt.Sprintf("Calling API: %v", err), + ) return } - ctx = core.LogResponse(ctx) - - if databaseResp == nil || databaseResp.Id == nil { + if createResp == nil || createResp.Id == nil { core.LogAndAddError( ctx, &resp.Diagnostics, @@ -201,11 +229,9 @@ func (r *databaseResource) Create(ctx context.Context, req resource.CreateReques return } - databaseId := *databaseResp.Id - databaseName := model.DatabaseName.String() + databaseId := *createResp.Id ctx = tflog.SetField(ctx, "database_id", databaseId) - ctx = tflog.SetField(ctx, "database_name", databaseName) ctx = core.LogResponse(ctx) @@ -221,6 +247,69 @@ func (r *databaseResource) Create(ctx context.Context, req resource.CreateReques return } + // TODO: is this necessary to wait for the database-> API say 200 ? + waitResp, err := wait.CreateDatabaseWaitHandler( + ctx, + r.client, + projectId, + instanceId, + region, + databaseName, + ).SetSleepBeforeWait( + 30 * time.Second, + ).SetTimeout( + 15 * time.Minute, + ).WaitWithContext(ctx) + if err != nil { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + createErr, + fmt.Sprintf("Database creation waiting: %v", err), + ) + return + } + + if waitResp.Id == nil { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + createErr, + "Database creation waiting: returned id is nil", + ) + return + } + + if *waitResp.Id != databaseId { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + createErr, + "Database creation waiting: returned id is different", + ) + return + } + + if *waitResp.Owner != data.Owner.ValueString() { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + createErr, + "Database creation waiting: returned owner is different", + ) + return + } + + if *waitResp.Name != data.Name.ValueString() { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + createErr, + "Database creation waiting: returned name is different", + ) + return + } + database, err := r.client.GetDatabaseRequest(ctx, projectId, region, instanceId, databaseName).Execute() if err != nil { core.LogAndAddError( @@ -233,7 +322,7 @@ func (r *databaseResource) Create(ctx context.Context, req resource.CreateReques } // Map response body to schema - err = mapResourceFields(database, &model, region) + err = mapResourceFields(database, &data, region) if err != nil { core.LogAndAddError( ctx, @@ -245,11 +334,13 @@ func (r *databaseResource) Create(ctx context.Context, req resource.CreateReques } // Set state to fully populated data - resp.Diagnostics.Append(resp.State.Set(ctx, model)...) + resp.Diagnostics.Append(resp.State.Set(ctx, data)...) if resp.Diagnostics.HasError() { return } + // Save data into Terraform state + tflog.Info(ctx, "sqlserverflexalpha.Database created") } @@ -310,7 +401,7 @@ func (r *databaseResource) Read(ctx context.Context, req resource.ReadRequest, r return } - // Set data returned by API in identity + // Save identity into Terraform state identity := DatabaseResourceIdentityModel{ ProjectID: types.StringValue(projectId), Region: types.StringValue(region), @@ -372,7 +463,13 @@ func (r *databaseResource) Delete(ctx context.Context, req resource.DeleteReques // Delete existing record set err := r.client.DeleteDatabaseRequestExecute(ctx, projectId, region, instanceId, databaseName) if err != nil { - core.LogAndAddError(ctx, &resp.Diagnostics, "Error deleting database", fmt.Sprintf("Calling API: %v", err)) + core.LogAndAddError( + ctx, + &resp.Diagnostics, + "Error deleting database", + fmt.Sprintf( + "Calling API: %v\nname: %s, region: %s, instanceId: %s", err, databaseName, region, instanceId)) + return } ctx = core.LogResponse(ctx) @@ -388,11 +485,13 @@ func (r *databaseResource) ModifyPlan( req resource.ModifyPlanRequest, resp *resource.ModifyPlanResponse, ) { // nolint:gocritic // function signature required by Terraform - var configModel resourceModel + // skip initial empty configuration to avoid follow-up errors if req.Config.Raw.IsNull() { return } + + var configModel resourceModel resp.Diagnostics.Append(req.Config.Get(ctx, &configModel)...) if resp.Diagnostics.HasError() { return @@ -409,6 +508,23 @@ func (r *databaseResource) ModifyPlan( return } + var identityModel DatabaseResourceIdentityModel + identityModel.ProjectID = planModel.ProjectId + identityModel.Region = planModel.Region + + if !planModel.InstanceId.IsNull() && !planModel.InstanceId.IsUnknown() { + identityModel.InstanceID = planModel.InstanceId + } + + if !planModel.Name.IsNull() && !planModel.Name.IsUnknown() { + identityModel.DatabaseName = planModel.Name + } + + resp.Diagnostics.Append(resp.Identity.Set(ctx, identityModel)...) + if resp.Diagnostics.HasError() { + return + } + resp.Diagnostics.Append(resp.Plan.Set(ctx, planModel)...) if resp.Diagnostics.HasError() { return @@ -416,7 +532,7 @@ func (r *databaseResource) ModifyPlan( } // ImportState imports a resource into the Terraform state on success. -// The expected import identifier format is: [project_id],[region],[instance_id],[database_id] +// The expected format of the resource import identifier is: project_id,zone_id,record_set_id func (r *databaseResource) ImportState( ctx context.Context, req resource.ImportStateRequest, @@ -432,36 +548,31 @@ func (r *databaseResource) ImportState( ctx, &resp.Diagnostics, "Error importing database", fmt.Sprintf( - "Expected import identifier with format [project_id],[region],[instance_id],[database_name], got %q", + "Expected import identifier with format: [project_id],[region],[instance_id],[database_name] Got: %q", req.ID, ), ) return } - databaseId, err := strconv.ParseInt(idParts[3], 10, 64) - if err != nil { - core.LogAndAddError( - ctx, - &resp.Diagnostics, - "Error importing database", - fmt.Sprintf("Invalid database_id format: %q. It must be a valid integer.", idParts[3]), - ) - return - } - resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), idParts[0])...) resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), idParts[1])...) resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), idParts[2])...) - resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("database_name"), databaseId)...) + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("database_name"), idParts[3])...) - core.LogAndAddWarning( - ctx, - &resp.Diagnostics, - "Sqlserverflexalpha database imported with empty password", - "The database password is not imported as it is only available upon creation of a new database. The password field will be empty.", - ) - tflog.Info(ctx, "Sqlserverflexalpha database state imported") + var identityData DatabaseResourceIdentityModel + identityData.ProjectID = types.StringValue(idParts[0]) + identityData.Region = types.StringValue(idParts[1]) + identityData.InstanceID = types.StringValue(idParts[2]) + identityData.DatabaseName = types.StringValue(idParts[3]) + + resp.Diagnostics.Append(resp.Identity.Set(ctx, &identityData)...) + if resp.Diagnostics.HasError() { + return + } + + tflog.Info(ctx, "sqlserverflexalpha database state imported") + return } // If no ID is provided, attempt to read identity attributes from the import configuration @@ -481,7 +592,7 @@ func (r *databaseResource) ImportState( resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), instanceId)...) resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("database_name"), databaseName)...) - tflog.Info(ctx, "Sqlserverflexalpha database state imported") + tflog.Info(ctx, "sqlserverflexalpha database state imported") } // extractIdentityData extracts essential identifiers from the resource model, falling back to the identity model. diff --git a/stackit/internal/services/sqlserverflexalpha/flavor/datasource.go b/stackit/internal/services/sqlserverflexalpha/flavor/datasource.go index a48e7572..d56aafa5 100644 --- a/stackit/internal/services/sqlserverflexalpha/flavor/datasource.go +++ b/stackit/internal/services/sqlserverflexalpha/flavor/datasource.go @@ -1,4 +1,4 @@ -package sqlserverFlexAlphaFlavor +package sqlserverflexalphaFlavor import ( "context" @@ -10,14 +10,14 @@ import ( "github.com/hashicorp/terraform-plugin-framework/types" "github.com/hashicorp/terraform-plugin-framework/types/basetypes" "github.com/hashicorp/terraform-plugin-log/tflog" + "github.com/stackitcloud/stackit-sdk-go/core/config" - "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" + sqlserverflexalphaPkg "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha" sqlserverflexalphaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/flavor/datasources_gen" - sqlserverflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/utils" ) // Ensure the implementation satisfies the expected interfaces. @@ -26,11 +26,6 @@ var ( _ datasource.DataSourceWithConfigure = &flavorDataSource{} ) -// NewFlavorDataSource is a helper function to simplify the provider implementation. -func NewFlavorDataSource() datasource.DataSource { - return &flavorDataSource{} -} - type FlavorModel struct { ProjectId types.String `tfsdk:"project_id"` Region types.String `tfsdk:"region"` @@ -46,39 +41,58 @@ type FlavorModel struct { StorageClasses types.List `tfsdk:"storage_classes"` } +// NewFlavorDataSource is a helper function to simplify the provider implementation. +func NewFlavorDataSource() datasource.DataSource { + return &flavorDataSource{} +} + // flavorDataSource is the data source implementation. type flavorDataSource struct { - client *sqlserverflexalpha.APIClient + client *sqlserverflexalphaPkg.APIClient providerData core.ProviderData } // Metadata returns the data source type name. -func (r *flavorDataSource) Metadata( - _ context.Context, - req datasource.MetadataRequest, - resp *datasource.MetadataResponse, -) { +func (r *flavorDataSource) Metadata(_ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) { resp.TypeName = req.ProviderTypeName + "_sqlserverflexalpha_flavor" } // Configure adds the provider configured client to the data source. -func (r *flavorDataSource) Configure( - ctx context.Context, - req datasource.ConfigureRequest, - resp *datasource.ConfigureResponse, -) { +func (r *flavorDataSource) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) { var ok bool r.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics) if !ok { return } - apiClient := sqlserverflexUtils.ConfigureClient(ctx, &r.providerData, &resp.Diagnostics) - if resp.Diagnostics.HasError() { + apiClientConfigOptions := []config.ConfigurationOption{ + config.WithCustomAuth(r.providerData.RoundTripper), + utils.UserAgentConfigOption(r.providerData.Version), + } + if r.providerData.SQLServerFlexCustomEndpoint != "" { + apiClientConfigOptions = append( + apiClientConfigOptions, + config.WithEndpoint(r.providerData.SQLServerFlexCustomEndpoint), + ) + } else { + apiClientConfigOptions = append( + apiClientConfigOptions, + config.WithRegion(r.providerData.GetRegion()), + ) + } + apiClient, err := sqlserverflexalphaPkg.NewAPIClient(apiClientConfigOptions...) + if err != nil { + resp.Diagnostics.AddError( + "Error configuring API client", + fmt.Sprintf( + "Configuring client: %v. This is an error related to the provider configuration, not to the resource configuration", + err, + ), + ) return } r.client = apiClient - tflog.Info(ctx, "Postgres Flex instance client configured") + tflog.Info(ctx, "SQL Server Flex instance client configured") } func (r *flavorDataSource) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) { @@ -86,13 +100,13 @@ func (r *flavorDataSource) Schema(ctx context.Context, _ datasource.SchemaReques Attributes: map[string]schema.Attribute{ "project_id": schema.StringAttribute{ Required: true, - Description: "The cpu count of the instance.", - MarkdownDescription: "The cpu count of the instance.", + Description: "The project ID of the flavor.", + MarkdownDescription: "The project ID of the flavor.", }, "region": schema.StringAttribute{ Required: true, - Description: "The flavor description.", - MarkdownDescription: "The flavor description.", + Description: "The region of the flavor.", + MarkdownDescription: "The region of the flavor.", }, "cpu": schema.Int64Attribute{ Required: true, @@ -109,6 +123,16 @@ func (r *flavorDataSource) Schema(ctx context.Context, _ datasource.SchemaReques Description: "The memory of the instance in Gibibyte.", MarkdownDescription: "The memory of the instance in Gibibyte.", }, + "node_type": schema.StringAttribute{ + Required: true, + Description: "defines the nodeType it can be either single or HA", + MarkdownDescription: "defines the nodeType it can be either single or HA", + }, + "flavor_id": schema.StringAttribute{ + Computed: true, + Description: "The id of the instance flavor.", + MarkdownDescription: "The id of the instance flavor.", + }, "description": schema.StringAttribute{ Computed: true, Description: "The flavor description.", @@ -116,13 +140,8 @@ func (r *flavorDataSource) Schema(ctx context.Context, _ datasource.SchemaReques }, "id": schema.StringAttribute{ Computed: true, - Description: "The terraform id of the instance flavor.", - MarkdownDescription: "The terraform id of the instance flavor.", - }, - "flavor_id": schema.StringAttribute{ - Computed: true, - Description: "The flavor id of the instance flavor.", - MarkdownDescription: "The flavor id of the instance flavor.", + Description: "The id of the instance flavor.", + MarkdownDescription: "The id of the instance flavor.", }, "max_gb": schema.Int64Attribute{ Computed: true, @@ -134,13 +153,7 @@ func (r *flavorDataSource) Schema(ctx context.Context, _ datasource.SchemaReques Description: "minimum storage which is required to order in Gigabyte.", MarkdownDescription: "minimum storage which is required to order in Gigabyte.", }, - "node_type": schema.StringAttribute{ - Required: true, - Description: "defines the nodeType it can be either single or replica", - MarkdownDescription: "defines the nodeType it can be either single or replica", - }, "storage_classes": schema.ListNestedAttribute{ - Computed: true, NestedObject: schema.NestedAttributeObject{ Attributes: map[string]schema.Attribute{ "class": schema.StringAttribute{ @@ -159,8 +172,89 @@ func (r *flavorDataSource) Schema(ctx context.Context, _ datasource.SchemaReques }, }, }, + Computed: true, + Description: "maximum storage which can be ordered for the flavor in Gigabyte.", + MarkdownDescription: "maximum storage which can be ordered for the flavor in Gigabyte.", }, }, + //Attributes: map[string]schema.Attribute{ + // "project_id": schema.StringAttribute{ + // Required: true, + // Description: "The cpu count of the instance.", + // MarkdownDescription: "The cpu count of the instance.", + // }, + // "region": schema.StringAttribute{ + // Required: true, + // Description: "The flavor description.", + // MarkdownDescription: "The flavor description.", + // }, + // "cpu": schema.Int64Attribute{ + // Required: true, + // Description: "The cpu count of the instance.", + // MarkdownDescription: "The cpu count of the instance.", + // }, + // "ram": schema.Int64Attribute{ + // Required: true, + // Description: "The memory of the instance in Gibibyte.", + // MarkdownDescription: "The memory of the instance in Gibibyte.", + // }, + // "storage_class": schema.StringAttribute{ + // Required: true, + // Description: "The memory of the instance in Gibibyte.", + // MarkdownDescription: "The memory of the instance in Gibibyte.", + // }, + // "description": schema.StringAttribute{ + // Computed: true, + // Description: "The flavor description.", + // MarkdownDescription: "The flavor description.", + // }, + // "id": schema.StringAttribute{ + // Computed: true, + // Description: "The terraform id of the instance flavor.", + // MarkdownDescription: "The terraform id of the instance flavor.", + // }, + // "flavor_id": schema.StringAttribute{ + // Computed: true, + // Description: "The flavor id of the instance flavor.", + // MarkdownDescription: "The flavor id of the instance flavor.", + // }, + // "max_gb": schema.Int64Attribute{ + // Computed: true, + // Description: "maximum storage which can be ordered for the flavor in Gigabyte.", + // MarkdownDescription: "maximum storage which can be ordered for the flavor in Gigabyte.", + // }, + // "min_gb": schema.Int64Attribute{ + // Computed: true, + // Description: "minimum storage which is required to order in Gigabyte.", + // MarkdownDescription: "minimum storage which is required to order in Gigabyte.", + // }, + // "node_type": schema.StringAttribute{ + // Required: true, + // Description: "defines the nodeType it can be either single or replica", + // MarkdownDescription: "defines the nodeType it can be either single or replica", + // }, + // "storage_classes": schema.ListNestedAttribute{ + // Computed: true, + // NestedObject: schema.NestedAttributeObject{ + // Attributes: map[string]schema.Attribute{ + // "class": schema.StringAttribute{ + // Computed: true, + // }, + // "max_io_per_sec": schema.Int64Attribute{ + // Computed: true, + // }, + // "max_through_in_mb": schema.Int64Attribute{ + // Computed: true, + // }, + // }, + // CustomType: sqlserverflexalphaGen.StorageClassesType{ + // ObjectType: types.ObjectType{ + // AttrTypes: sqlserverflexalphaGen.StorageClassesValue{}.AttributeTypes(ctx), + // }, + // }, + // }, + // }, + // }, } } @@ -185,7 +279,7 @@ func (r *flavorDataSource) Read(ctx context.Context, req datasource.ReadRequest, return } - var foundFlavors []sqlserverflexalpha.ListFlavors + var foundFlavors []sqlserverflexalphaPkg.ListFlavors for _, flavor := range flavors { if model.Cpu.ValueInt64() != *flavor.Cpu { continue @@ -220,13 +314,11 @@ func (r *flavorDataSource) Read(ctx context.Context, req datasource.ReadRequest, model.MinGb = types.Int64Value(*f.MinGB) if f.StorageClasses == nil { - model.StorageClasses = types.ListNull( - sqlserverflexalphaGen.StorageClassesType{ - ObjectType: basetypes.ObjectType{ - AttrTypes: sqlserverflexalphaGen.StorageClassesValue{}.AttributeTypes(ctx), - }, + model.StorageClasses = types.ListNull(sqlserverflexalphaGen.StorageClassesType{ + ObjectType: basetypes.ObjectType{ + AttrTypes: sqlserverflexalphaGen.StorageClassesValue{}.AttributeTypes(ctx), }, - ) + }) } else { var scList []attr.Value for _, sc := range *f.StorageClasses { @@ -259,5 +351,5 @@ func (r *flavorDataSource) Read(ctx context.Context, req datasource.ReadRequest, if resp.Diagnostics.HasError() { return } - tflog.Info(ctx, "Postgres Flex flavors read") + tflog.Info(ctx, "SQL Server Flex flavors read") } diff --git a/stackit/internal/services/sqlserverflexalpha/flavor/functions.go b/stackit/internal/services/sqlserverflexalpha/flavor/functions.go index e396324a..469b7bce 100644 --- a/stackit/internal/services/sqlserverflexalpha/flavor/functions.go +++ b/stackit/internal/services/sqlserverflexalpha/flavor/functions.go @@ -1,4 +1,4 @@ -package sqlserverFlexAlphaFlavor +package sqlserverflexalphaFlavor import ( "context" diff --git a/stackit/internal/services/sqlserverflexalpha/flavor/functions_test.go b/stackit/internal/services/sqlserverflexalpha/flavor/functions_test.go index af3b370a..bed6462c 100644 --- a/stackit/internal/services/sqlserverflexalpha/flavor/functions_test.go +++ b/stackit/internal/services/sqlserverflexalpha/flavor/functions_test.go @@ -1,4 +1,4 @@ -package sqlserverFlexAlphaFlavor +package sqlserverflexalphaFlavor import ( "context" diff --git a/stackit/internal/services/sqlserverflexalpha/flavors/datasource.go b/stackit/internal/services/sqlserverflexalpha/flavors/datasource.go index 8d5e8a50..2286e81b 100644 --- a/stackit/internal/services/sqlserverflexalpha/flavors/datasource.go +++ b/stackit/internal/services/sqlserverflexalpha/flavors/datasource.go @@ -2,30 +2,39 @@ package sqlserverflexalpha import ( "context" + "fmt" + "net/http" "github.com/hashicorp/terraform-plugin-framework/datasource" + "github.com/hashicorp/terraform-plugin-framework/datasource/schema" + "github.com/hashicorp/terraform-plugin-framework/types" "github.com/hashicorp/terraform-plugin-log/tflog" + "github.com/stackitcloud/stackit-sdk-go/core/config" - "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" - sqlserverflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/utils" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" + + sqlserverflexalphaPkg "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha" sqlserverflexalphaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/flavors/datasources_gen" ) -// dataSourceModel maps the data source schema data. -type dataSourceModel = sqlserverflexalphaGen.FlavorsModel - var _ datasource.DataSource = (*flavorsDataSource)(nil) -// TODO: Use NewFlavorsDataSource when datasource is implemented +const errorPrefix = "[sqlserverflexalpha - Flavors]" + func NewFlavorsDataSource() datasource.DataSource { return &flavorsDataSource{} } +type dataSourceModel struct { + sqlserverflexalphaGen.FlavorsModel + TerraformId types.String `tfsdk:"id"` +} + type flavorsDataSource struct { - client *sqlserverflexalpha.APIClient + client *sqlserverflexalphaPkg.APIClient providerData core.ProviderData } @@ -39,6 +48,11 @@ func (d *flavorsDataSource) Metadata( func (d *flavorsDataSource) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) { resp.Schema = sqlserverflexalphaGen.FlavorsDataSourceSchema(ctx) + resp.Schema.Attributes["id"] = schema.StringAttribute{ + Computed: true, + Description: "The terraform internal identifier.", + MarkdownDescription: "The terraform internal identifier.", + } } // Configure adds the provider configured client to the data source. @@ -53,12 +67,34 @@ func (d *flavorsDataSource) Configure( return } - apiClient := sqlserverflexUtils.ConfigureClient(ctx, &d.providerData, &resp.Diagnostics) - if resp.Diagnostics.HasError() { + apiClientConfigOptions := []config.ConfigurationOption{ + config.WithCustomAuth(d.providerData.RoundTripper), + utils.UserAgentConfigOption(d.providerData.Version), + } + if d.providerData.SQLServerFlexCustomEndpoint != "" { + apiClientConfigOptions = append( + apiClientConfigOptions, + config.WithEndpoint(d.providerData.SQLServerFlexCustomEndpoint), + ) + } else { + apiClientConfigOptions = append( + apiClientConfigOptions, + config.WithRegion(d.providerData.GetRegion()), + ) + } + apiClient, err := sqlserverflexalphaPkg.NewAPIClient(apiClientConfigOptions...) + if err != nil { + resp.Diagnostics.AddError( + "Error configuring API client", + fmt.Sprintf( + "Configuring client: %v. This is an error related to the provider configuration, not to the resource configuration", + err, + ), + ) return } d.client = apiClient - tflog.Info(ctx, "SQL SERVER Flex flavors client configured") + tflog.Info(ctx, fmt.Sprintf("%s client configured", errorPrefix)) } func (d *flavorsDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { @@ -71,11 +107,50 @@ func (d *flavorsDataSource) Read(ctx context.Context, req datasource.ReadRequest return } - // Todo: Read API call logic + ctx = core.InitProviderContext(ctx) - // Example data value setting - // data.Id = types.StringValue("example-id") + projectId := data.ProjectId.ValueString() + region := d.providerData.GetRegionWithOverride(data.Region) + // TODO: implement right identifier for flavors + flavorsId := data.Flavors - // Save data into Terraform state + ctx = tflog.SetField(ctx, "project_id", projectId) + ctx = tflog.SetField(ctx, "region", region) + + // TODO: implement needed fields + ctx = tflog.SetField(ctx, "flavors_id", flavorsId) + + // TODO: refactor to correct implementation + _, err := d.client.GetFlavorsRequest(ctx, projectId, region).Execute() + if err != nil { + utils.LogError( + ctx, + &resp.Diagnostics, + err, + "Reading flavors", + fmt.Sprintf("flavors with ID %q does not exist in project %q.", flavorsId, projectId), + map[int]string{ + http.StatusForbidden: fmt.Sprintf("Project with ID %q not found or forbidden access", projectId), + }, + ) + resp.State.RemoveResource(ctx) + return + } + + ctx = core.LogResponse(ctx) + + // TODO: refactor to correct implementation of internal tf id + data.TerraformId = utils.BuildInternalTerraformId(projectId, region) + + // TODO: fill remaining fields + // data.Flavors = types.Sometype(apiResponse.GetFlavors()) + // data.Page = types.Sometype(apiResponse.GetPage()) + // data.Pagination = types.Sometype(apiResponse.GetPagination()) + // data.ProjectId = types.Sometype(apiResponse.GetProjectId()) + // data.Region = types.Sometype(apiResponse.GetRegion()) + // data.Size = types.Sometype(apiResponse.GetSize()) + // data.Sort = types.Sometype(apiResponse.GetSort())// Save data into Terraform state resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) + + tflog.Info(ctx, fmt.Sprintf("%s read successful", errorPrefix)) } diff --git a/stackit/internal/services/sqlserverflexalpha/instance/datasource.go b/stackit/internal/services/sqlserverflexalpha/instance/datasource.go index 70c382df..123b1fe8 100644 --- a/stackit/internal/services/sqlserverflexalpha/instance/datasource.go +++ b/stackit/internal/services/sqlserverflexalpha/instance/datasource.go @@ -1,5 +1,3 @@ -// Copyright (c) STACKIT - package sqlserverflexalpha import ( @@ -7,47 +5,40 @@ import ( "fmt" "net/http" - "github.com/hashicorp/terraform-plugin-framework/datasource/schema" + "github.com/hashicorp/terraform-plugin-framework/datasource" "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/hashicorp/terraform-plugin-log/tflog" + "github.com/stackitcloud/stackit-sdk-go/core/config" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" - sqlserverflexalpha "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/instance/datasources_gen" - sqlserverflexalpha2 "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/instance/resources_gen" - sqlserverflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/utils" - - sqlserverflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha" - - "github.com/hashicorp/terraform-plugin-framework/datasource" - "github.com/hashicorp/terraform-plugin-log/tflog" - "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" + + sqlserverflexalphaPkg "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha" + + sqlserverflexalphaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/instance/datasources_gen" ) -// dataSourceModel maps the data source schema data. -type dataSourceModel struct { - sqlserverflexalpha2.InstanceModel - TerraformID types.String `tfsdk:"id"` -} +var _ datasource.DataSource = (*instanceDataSource)(nil) -// Ensure the implementation satisfies the expected interfaces. -var ( - _ datasource.DataSource = &instanceDataSource{} -) +const errorPrefix = "[sqlserverflexalpha - Instance]" -// NewInstanceDataSource is a helper function to simplify the provider implementation. func NewInstanceDataSource() datasource.DataSource { return &instanceDataSource{} } -// instanceDataSource is the data source implementation. +// dataSourceModel maps the data source schema data. +type dataSourceModel struct { + sqlserverflexalphaGen.InstanceModel + TerraformID types.String `tfsdk:"id"` +} + type instanceDataSource struct { - client *sqlserverflex.APIClient + client *sqlserverflexalphaPkg.APIClient providerData core.ProviderData } -// Metadata returns the data source type name. -func (r *instanceDataSource) Metadata( +func (d *instanceDataSource) Metadata( _ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse, @@ -55,66 +46,80 @@ func (r *instanceDataSource) Metadata( resp.TypeName = req.ProviderTypeName + "_sqlserverflexalpha_instance" } +func (d *instanceDataSource) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) { + resp.Schema = sqlserverflexalphaGen.InstanceDataSourceSchema(ctx) +} + // Configure adds the provider configured client to the data source. -func (r *instanceDataSource) Configure( +func (d *instanceDataSource) Configure( ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse, ) { var ok bool - r.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics) + d.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics) if !ok { return } - apiClient := sqlserverflexUtils.ConfigureClient(ctx, &r.providerData, &resp.Diagnostics) - if resp.Diagnostics.HasError() { + apiClientConfigOptions := []config.ConfigurationOption{ + config.WithCustomAuth(d.providerData.RoundTripper), + utils.UserAgentConfigOption(d.providerData.Version), + } + if d.providerData.SQLServerFlexCustomEndpoint != "" { + apiClientConfigOptions = append( + apiClientConfigOptions, + config.WithEndpoint(d.providerData.SQLServerFlexCustomEndpoint), + ) + } else { + apiClientConfigOptions = append( + apiClientConfigOptions, + config.WithRegion(d.providerData.GetRegion()), + ) + } + apiClient, err := sqlserverflexalphaPkg.NewAPIClient(apiClientConfigOptions...) + if err != nil { + resp.Diagnostics.AddError( + "Error configuring API client", + fmt.Sprintf( + "Configuring client: %v. This is an error related to the provider configuration, not to the resource configuration", + err, + ), + ) return } - r.client = apiClient - tflog.Info(ctx, "SQLServer Flex instance client configured") + d.client = apiClient + tflog.Info(ctx, fmt.Sprintf("%s client configured", errorPrefix)) } -// Schema defines the schema for the data source. -func (r *instanceDataSource) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) { - s := sqlserverflexalpha.InstanceDataSourceSchema(ctx) - s.Attributes["id"] = schema.StringAttribute{ - Description: "Terraform's internal resource ID. It is structured as \\\"`project_id`,`region`,`instance_id`\\\".", - Computed: true, - } +func (d *instanceDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { + var data dataSourceModel - resp.Schema = s -} + // Read Terraform configuration data into the model + resp.Diagnostics.Append(req.Config.Get(ctx, &data)...) -// Read refreshes the Terraform state with the latest data. -func (r *instanceDataSource) Read( - ctx context.Context, - req datasource.ReadRequest, - resp *datasource.ReadResponse, -) { // nolint:gocritic // function signature required by Terraform - var model dataSourceModel - diags := req.Config.Get(ctx, &model) - resp.Diagnostics.Append(diags...) if resp.Diagnostics.HasError() { return } ctx = core.InitProviderContext(ctx) - projectId := model.ProjectId.ValueString() - instanceId := model.InstanceId.ValueString() - region := r.providerData.GetRegionWithOverride(model.Region) + projectId := data.ProjectId.ValueString() + region := d.providerData.GetRegionWithOverride(data.Region) + instanceId := data.InstanceId.ValueString() + ctx = tflog.SetField(ctx, "project_id", projectId) - ctx = tflog.SetField(ctx, "instance_id", instanceId) ctx = tflog.SetField(ctx, "region", region) - instanceResp, err := r.client.GetInstanceRequest(ctx, projectId, region, instanceId).Execute() + ctx = tflog.SetField(ctx, "instance_id", instanceId) + + instanceResp, err := d.client.GetInstanceRequest(ctx, projectId, region, instanceId).Execute() if err != nil { utils.LogError( ctx, &resp.Diagnostics, err, "Reading instance", - fmt.Sprintf("Instance with ID %q does not exist in project %q.", instanceId, projectId), + fmt.Sprintf("instance with ID %q does not exist in project %q.", instanceId, projectId), map[int]string{ http.StatusForbidden: fmt.Sprintf("Project with ID %q not found or forbidden access", projectId), }, @@ -125,49 +130,17 @@ func (r *instanceDataSource) Read( ctx = core.LogResponse(ctx) - // var storage = &storageModel{} - // if !model.Storage.IsNull() && !model.Storage.IsUnknown() { - // diags = model.Storage.As(ctx, storage, basetypes.ObjectAsOptions{}) - // resp.Diagnostics.Append(diags...) - // if resp.Diagnostics.HasError() { - // return - // } - //} - // - // var encryption = &encryptionModel{} - //if !model.Encryption.IsNull() && !model.Encryption.IsUnknown() { - // diags = model.Encryption.As(ctx, encryption, basetypes.ObjectAsOptions{}) - // resp.Diagnostics.Append(diags...) - // if resp.Diagnostics.HasError() { - // return - // } - //} - // - //var network = &networkModel{} - //if !model.Network.IsNull() && !model.Network.IsUnknown() { - // diags = model.Network.As(ctx, network, basetypes.ObjectAsOptions{}) - // resp.Diagnostics.Append(diags...) - // if resp.Diagnostics.HasError() { - // return - // } - //} - - err = mapFields(ctx, instanceResp, &model, resp.Diagnostics) - // err = mapFields(ctx, instanceResp, &model, storage, encryption, network, region) + err = mapDataResponseToModel(ctx, instanceResp, &data, resp.Diagnostics) if err != nil { core.LogAndAddError( ctx, &resp.Diagnostics, - "Error reading instance", + fmt.Sprintf("%s Read", errorPrefix), fmt.Sprintf("Processing API payload: %v", err), ) return } - // Set refreshed state - diags = resp.State.Set(ctx, model) - resp.Diagnostics.Append(diags...) - if resp.Diagnostics.HasError() { - return - } - tflog.Info(ctx, "SQLServer Flex instance read") + + // Save data into Terraform state + resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) } diff --git a/stackit/internal/services/sqlserverflexalpha/instance/functions.go b/stackit/internal/services/sqlserverflexalpha/instance/functions.go index 28f82aa4..793b7e23 100644 --- a/stackit/internal/services/sqlserverflexalpha/instance/functions.go +++ b/stackit/internal/services/sqlserverflexalpha/instance/functions.go @@ -2,6 +2,7 @@ package sqlserverflexalpha import ( "context" + "errors" "fmt" "math" @@ -10,29 +11,35 @@ import ( "github.com/hashicorp/terraform-plugin-framework/resource" "github.com/hashicorp/terraform-plugin-framework/types" - sqlserverflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" - sqlserverflexResGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/instance/resources_gen" + sqlserverflexalphaDataGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/instance/datasources_gen" + sqlserverflexalphaResGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/instance/resources_gen" ) -// instanceModel is a type constraint for models that can be mapped from a GetInstanceResponse. -type instanceModel interface { - *dataSourceModel | *resourceModel -} - -func mapFields[T instanceModel]( +func mapResponseToModel( ctx context.Context, - resp *sqlserverflex.GetInstanceResponse, - m T, + resp *sqlserverflexalpha.GetInstanceResponse, + m *sqlserverflexalphaResGen.InstanceModel, tfDiags diag.Diagnostics, ) error { + m.BackupSchedule = types.StringValue(resp.GetBackupSchedule()) + m.Edition = types.StringValue(string(resp.GetEdition())) + m.Encryption = handleEncryption(m, resp) + m.FlavorId = types.StringValue(resp.GetFlavorId()) + m.Id = types.StringValue(resp.GetId()) + m.InstanceId = types.StringValue(resp.GetId()) + m.IsDeletable = types.BoolValue(resp.GetIsDeletable()) + m.Name = types.StringValue(resp.GetName()) netAcl, diags := types.ListValueFrom(ctx, types.StringType, resp.Network.GetAcl()) tfDiags.Append(diags...) if diags.HasError() { - return fmt.Errorf("error converting network acl response value") + return fmt.Errorf( + "error converting network acl response value", + ) } - net, diags := sqlserverflexResGen.NewNetworkValue( - sqlserverflexResGen.NetworkValue{}.AttributeTypes(ctx), + net, diags := sqlserverflexalphaResGen.NewNetworkValue( + sqlserverflexalphaResGen.NetworkValue{}.AttributeTypes(ctx), map[string]attr.Value{ "access_scope": types.StringValue(string(resp.Network.GetAccessScope())), "acl": netAcl, @@ -42,11 +49,15 @@ func mapFields[T instanceModel]( ) tfDiags.Append(diags...) if diags.HasError() { - return fmt.Errorf("error converting network response value") + return errors.New("error converting network response value") } + m.Network = net + m.Replicas = types.Int64Value(int64(resp.GetReplicas())) + m.RetentionDays = types.Int64Value(resp.GetRetentionDays()) + m.Status = types.StringValue(string(resp.GetStatus())) - stor, diags := sqlserverflexResGen.NewStorageValue( - sqlserverflexResGen.StorageValue{}.AttributeTypes(ctx), + stor, diags := sqlserverflexalphaResGen.NewStorageValue( + sqlserverflexalphaResGen.StorageValue{}.AttributeTypes(ctx), map[string]attr.Value{ "class": types.StringValue(resp.Storage.GetClass()), "size": types.Int64Value(resp.Storage.GetSize()), @@ -56,62 +67,117 @@ func mapFields[T instanceModel]( if diags.HasError() { return fmt.Errorf("error converting storage response value") } + m.Storage = stor - // The interface conversion is safe due to the type constraint. - model := any(m) + m.Version = types.StringValue(string(resp.GetVersion())) + return nil +} - if rm, ok := model.(*resourceModel); ok { - rm.BackupSchedule = types.StringValue(resp.GetBackupSchedule()) - rm.Edition = types.StringValue(string(resp.GetEdition())) - rm.Encryption = handleEncryption(rm.Encryption, resp) - rm.FlavorId = types.StringValue(resp.GetFlavorId()) - rm.Id = types.StringValue(resp.GetId()) - rm.InstanceId = types.StringValue(resp.GetId()) - rm.IsDeletable = types.BoolValue(resp.GetIsDeletable()) - rm.Name = types.StringValue(resp.GetName()) - rm.Network = net - rm.Replicas = types.Int64Value(int64(resp.GetReplicas())) - rm.RetentionDays = types.Int64Value(resp.GetRetentionDays()) - rm.Status = types.StringValue(string(resp.GetStatus())) - rm.Storage = stor - rm.Version = types.StringValue(string(resp.GetVersion())) - } else if dm, ok := model.(*dataSourceModel); ok { - dm.BackupSchedule = types.StringValue(resp.GetBackupSchedule()) - dm.Edition = types.StringValue(string(resp.GetEdition())) - dm.Encryption = handleEncryption(dm.Encryption, resp) - dm.FlavorId = types.StringValue(resp.GetFlavorId()) - dm.Id = types.StringValue(resp.GetId()) - dm.InstanceId = types.StringValue(resp.GetId()) - dm.IsDeletable = types.BoolValue(resp.GetIsDeletable()) - dm.Name = types.StringValue(resp.GetName()) - dm.Network = net - dm.Replicas = types.Int64Value(int64(resp.GetReplicas())) - dm.RetentionDays = types.Int64Value(resp.GetRetentionDays()) - dm.Status = types.StringValue(string(resp.GetStatus())) - dm.Storage = stor - dm.Version = types.StringValue(string(resp.GetVersion())) +func mapDataResponseToModel( + ctx context.Context, + resp *sqlserverflexalpha.GetInstanceResponse, + m *dataSourceModel, + tfDiags diag.Diagnostics, +) error { + m.BackupSchedule = types.StringValue(resp.GetBackupSchedule()) + m.Edition = types.StringValue(string(resp.GetEdition())) + m.Encryption = handleDSEncryption(m, resp) + m.FlavorId = types.StringValue(resp.GetFlavorId()) + m.Id = types.StringValue(resp.GetId()) + m.InstanceId = types.StringValue(resp.GetId()) + m.IsDeletable = types.BoolValue(resp.GetIsDeletable()) + m.Name = types.StringValue(resp.GetName()) + netAcl, diags := types.ListValueFrom(ctx, types.StringType, resp.Network.GetAcl()) + tfDiags.Append(diags...) + if diags.HasError() { + return fmt.Errorf( + "error converting network acl response value", + ) } + net, diags := sqlserverflexalphaDataGen.NewNetworkValue( + sqlserverflexalphaDataGen.NetworkValue{}.AttributeTypes(ctx), + map[string]attr.Value{ + "access_scope": types.StringValue(string(resp.Network.GetAccessScope())), + "acl": netAcl, + "instance_address": types.StringValue(resp.Network.GetInstanceAddress()), + "router_address": types.StringValue(resp.Network.GetRouterAddress()), + }, + ) + tfDiags.Append(diags...) + if diags.HasError() { + return errors.New("error converting network response value") + } + m.Network = net + m.Replicas = types.Int64Value(int64(resp.GetReplicas())) + m.RetentionDays = types.Int64Value(resp.GetRetentionDays()) + m.Status = types.StringValue(string(resp.GetStatus())) + stor, diags := sqlserverflexalphaDataGen.NewStorageValue( + sqlserverflexalphaDataGen.StorageValue{}.AttributeTypes(ctx), + map[string]attr.Value{ + "class": types.StringValue(resp.Storage.GetClass()), + "size": types.Int64Value(resp.Storage.GetSize()), + }, + ) + tfDiags.Append(diags...) + if diags.HasError() { + return fmt.Errorf("error converting storage response value") + } + m.Storage = stor + + m.Version = types.StringValue(string(resp.GetVersion())) return nil } func handleEncryption( - encryptionValue sqlserverflexResGen.EncryptionValue, - resp *sqlserverflex.GetInstanceResponse, -) sqlserverflexResGen.EncryptionValue { + m *sqlserverflexalphaResGen.InstanceModel, + resp *sqlserverflexalpha.GetInstanceResponse, +) sqlserverflexalphaResGen.EncryptionValue { if !resp.HasEncryption() || resp.Encryption == nil || resp.Encryption.KekKeyId == nil || resp.Encryption.KekKeyRingId == nil || resp.Encryption.KekKeyVersion == nil || resp.Encryption.ServiceAccount == nil { - if encryptionValue.IsNull() || encryptionValue.IsUnknown() { - return sqlserverflexResGen.NewEncryptionValueNull() + if m.Encryption.IsNull() || m.Encryption.IsUnknown() { + return sqlserverflexalphaResGen.NewEncryptionValueNull() } - return encryptionValue + return m.Encryption } - enc := sqlserverflexResGen.NewEncryptionValueNull() + enc := sqlserverflexalphaResGen.NewEncryptionValueNull() + if kVal, ok := resp.Encryption.GetKekKeyIdOk(); ok { + enc.KekKeyId = types.StringValue(kVal) + } + if kkVal, ok := resp.Encryption.GetKekKeyRingIdOk(); ok { + enc.KekKeyRingId = types.StringValue(kkVal) + } + if kkvVal, ok := resp.Encryption.GetKekKeyVersionOk(); ok { + enc.KekKeyVersion = types.StringValue(kkvVal) + } + if sa, ok := resp.Encryption.GetServiceAccountOk(); ok { + enc.ServiceAccount = types.StringValue(sa) + } + return enc +} + +func handleDSEncryption( + m *dataSourceModel, + resp *sqlserverflexalpha.GetInstanceResponse, +) sqlserverflexalphaDataGen.EncryptionValue { + if !resp.HasEncryption() || + resp.Encryption == nil || + resp.Encryption.KekKeyId == nil || + resp.Encryption.KekKeyRingId == nil || + resp.Encryption.KekKeyVersion == nil || + resp.Encryption.ServiceAccount == nil { + if m.Encryption.IsNull() || m.Encryption.IsUnknown() { + return sqlserverflexalphaDataGen.NewEncryptionValueNull() + } + return m.Encryption + } + + enc := sqlserverflexalphaDataGen.NewEncryptionValueNull() if kVal, ok := resp.Encryption.GetKekKeyIdOk(); ok { enc.KekKeyId = types.StringValue(kVal) } @@ -129,25 +195,25 @@ func handleEncryption( func toCreatePayload( ctx context.Context, - model *sqlserverflexResGen.InstanceModel, -) (*sqlserverflex.CreateInstanceRequestPayload, error) { + model *sqlserverflexalphaResGen.InstanceModel, +) (*sqlserverflexalpha.CreateInstanceRequestPayload, error) { if model == nil { return nil, fmt.Errorf("nil model") } - storagePayload := &sqlserverflex.CreateInstanceRequestPayloadGetStorageArgType{} + storagePayload := &sqlserverflexalpha.CreateInstanceRequestPayloadGetStorageArgType{} if !model.Storage.IsNull() && !model.Storage.IsUnknown() { storagePayload.Class = model.Storage.Class.ValueStringPointer() storagePayload.Size = model.Storage.Size.ValueInt64Pointer() } - var encryptionPayload *sqlserverflex.CreateInstanceRequestPayloadGetEncryptionArgType = nil + var encryptionPayload *sqlserverflexalpha.CreateInstanceRequestPayloadGetEncryptionArgType = nil if !model.Encryption.IsNull() && !model.Encryption.IsUnknown() && !model.Encryption.KekKeyId.IsNull() && model.Encryption.KekKeyId.IsUnknown() && model.Encryption.KekKeyId.ValueString() != "" && !model.Encryption.KekKeyRingId.IsNull() && !model.Encryption.KekKeyRingId.IsUnknown() && model.Encryption.KekKeyRingId.ValueString() != "" && !model.Encryption.KekKeyVersion.IsNull() && !model.Encryption.KekKeyVersion.IsUnknown() && model.Encryption.KekKeyVersion.ValueString() != "" && !model.Encryption.ServiceAccount.IsNull() && !model.Encryption.ServiceAccount.IsUnknown() && model.Encryption.ServiceAccount.ValueString() != "" { - encryptionPayload = &sqlserverflex.CreateInstanceRequestPayloadGetEncryptionArgType{ + encryptionPayload = &sqlserverflexalpha.CreateInstanceRequestPayloadGetEncryptionArgType{ KekKeyId: model.Encryption.KekKeyId.ValueStringPointer(), KekKeyRingId: model.Encryption.KekKeyVersion.ValueStringPointer(), KekKeyVersion: model.Encryption.KekKeyRingId.ValueStringPointer(), @@ -155,9 +221,9 @@ func toCreatePayload( } } - networkPayload := &sqlserverflex.CreateInstanceRequestPayloadGetNetworkArgType{} + networkPayload := &sqlserverflexalpha.CreateInstanceRequestPayloadGetNetworkArgType{} if !model.Network.IsNull() && !model.Network.IsUnknown() { - networkPayload.AccessScope = sqlserverflex.CreateInstanceRequestPayloadNetworkGetAccessScopeAttributeType( + networkPayload.AccessScope = sqlserverflexalpha.CreateInstanceRequestPayloadNetworkGetAccessScopeAttributeType( model.Network.AccessScope.ValueStringPointer(), ) @@ -169,7 +235,7 @@ func toCreatePayload( networkPayload.Acl = &resList } - return &sqlserverflex.CreateInstanceRequestPayload{ + return &sqlserverflexalpha.CreateInstanceRequestPayload{ BackupSchedule: conversion.StringValueToPointer(model.BackupSchedule), Encryption: encryptionPayload, FlavorId: conversion.StringValueToPointer(model.FlavorId), @@ -177,20 +243,25 @@ func toCreatePayload( Network: networkPayload, RetentionDays: conversion.Int64ValueToPointer(model.RetentionDays), Storage: storagePayload, - Version: sqlserverflex.CreateInstanceRequestPayloadGetVersionAttributeType(conversion.StringValueToPointer(model.Version)), + Version: sqlserverflexalpha.CreateInstanceRequestPayloadGetVersionAttributeType( + conversion.StringValueToPointer(model.Version), + ), }, nil + } -// TODO: check func with his args func toUpdatePayload( ctx context.Context, - m *sqlserverflexResGen.InstanceModel, + m *sqlserverflexalphaResGen.InstanceModel, resp *resource.UpdateResponse, -) (*sqlserverflex.UpdateInstanceRequestPayload, error) { +) (*sqlserverflexalpha.UpdateInstanceRequestPayload, error) { + if m == nil { + return nil, fmt.Errorf("nil model") + } if m.Replicas.ValueInt64() > math.MaxUint32 { return nil, fmt.Errorf("replicas value is too big for uint32") } - replVal := sqlserverflex.Replicas(uint32(m.Replicas.ValueInt64())) // nolint:gosec // check is performed above + replVal := sqlserverflexalpha.Replicas(uint32(m.Replicas.ValueInt64())) // nolint:gosec // check is performed above var netAcl []string diags := m.Network.Acl.ElementsAs(ctx, &netAcl, false) @@ -198,16 +269,16 @@ func toUpdatePayload( if diags.HasError() { return nil, fmt.Errorf("error converting model network acl value") } - return &sqlserverflex.UpdateInstanceRequestPayload{ + return &sqlserverflexalpha.UpdateInstanceRequestPayload{ BackupSchedule: m.BackupSchedule.ValueStringPointer(), FlavorId: m.FlavorId.ValueStringPointer(), Name: m.Name.ValueStringPointer(), - Network: &sqlserverflex.UpdateInstanceRequestPayloadNetwork{ - Acl: &netAcl, - }, - Replicas: &replVal, - RetentionDays: m.RetentionDays.ValueInt64Pointer(), - Storage: &sqlserverflex.StorageUpdate{Size: m.Storage.Size.ValueInt64Pointer()}, - Version: sqlserverflex.UpdateInstanceRequestPayloadGetVersionAttributeType(m.Version.ValueStringPointer()), + Network: sqlserverflexalpha.NewUpdateInstanceRequestPayloadNetwork(netAcl), + Replicas: &replVal, + RetentionDays: m.RetentionDays.ValueInt64Pointer(), + Storage: &sqlserverflexalpha.StorageUpdate{Size: m.Storage.Size.ValueInt64Pointer()}, + Version: sqlserverflexalpha.UpdateInstanceRequestPayloadGetVersionAttributeType( + m.Version.ValueStringPointer(), + ), }, nil } diff --git a/stackit/internal/services/sqlserverflexalpha/instance/resource.go b/stackit/internal/services/sqlserverflexalpha/instance/resource.go index 36b4d2e7..41f9027c 100644 --- a/stackit/internal/services/sqlserverflexalpha/instance/resource.go +++ b/stackit/internal/services/sqlserverflexalpha/instance/resource.go @@ -1,5 +1,3 @@ -// Copyright (c) STACKIT - package sqlserverflexalpha import ( @@ -10,26 +8,25 @@ import ( "strings" "time" - "github.com/hashicorp/terraform-plugin-framework/resource/identityschema" - - sqlserverflexalpha2 "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/instance/resources_gen" - sqlserverflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/utils" - "github.com/hashicorp/terraform-plugin-framework/path" "github.com/hashicorp/terraform-plugin-framework/resource" + "github.com/hashicorp/terraform-plugin-framework/resource/identityschema" "github.com/hashicorp/terraform-plugin-framework/types" "github.com/hashicorp/terraform-plugin-log/tflog" + "github.com/stackitcloud/stackit-sdk-go/core/config" + "github.com/stackitcloud/stackit-sdk-go/core/oapierror" - "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" - "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" - "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" wait "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/wait/sqlserverflexalpha" - "github.com/stackitcloud/stackit-sdk-go/core/oapierror" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha" + + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" + + sqlserverflexalphaResGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/instance/resources_gen" ) -// Ensure the implementation satisfies the expected interfaces. var ( _ resource.Resource = &instanceResource{} _ resource.ResourceWithConfigure = &instanceResource{} @@ -38,19 +35,17 @@ var ( _ resource.ResourceWithIdentity = &instanceResource{} ) -// NewInstanceResource is a helper function to simplify the provider implementation. func NewInstanceResource() resource.Resource { return &instanceResource{} } -//nolint:unused // TODO: remove if not needed later -var validNodeTypes []string = []string{ - "Single", - "Replica", +type instanceResource struct { + client *sqlserverflexalpha.APIClient + providerData core.ProviderData } // resourceModel describes the resource data model. -type resourceModel = sqlserverflexalpha2.InstanceModel +type resourceModel = sqlserverflexalphaResGen.InstanceModel type InstanceResourceIdentityModel struct { ProjectID types.String `tfsdk:"project_id"` @@ -58,93 +53,19 @@ type InstanceResourceIdentityModel struct { InstanceID types.String `tfsdk:"instance_id"` } -// instanceResource is the resource implementation. -type instanceResource struct { - client *sqlserverflexalpha.APIClient - providerData core.ProviderData -} - -// Metadata returns the resource type name. -func (r *instanceResource) Metadata(_ context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) { - resp.TypeName = req.ProviderTypeName + "_sqlserverflexalpha_instance" -} - -// Configure adds the provider configured client to the resource. -func (r *instanceResource) Configure( +func (r *instanceResource) Metadata( ctx context.Context, - req resource.ConfigureRequest, - resp *resource.ConfigureResponse, + req resource.MetadataRequest, + resp *resource.MetadataResponse, ) { - var ok bool - r.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics) - if !ok { - return - } - - apiClient := sqlserverflexUtils.ConfigureClient(ctx, &r.providerData, &resp.Diagnostics) - if resp.Diagnostics.HasError() { - return - } - r.client = apiClient - tflog.Info(ctx, "SQLServer Flex instance client configured") -} - -// ModifyPlan implements resource.ResourceWithModifyPlan. -// Use the modifier to set the effective region in the current plan. -func (r *instanceResource) ModifyPlan( - ctx context.Context, - req resource.ModifyPlanRequest, - resp *resource.ModifyPlanResponse, -) { // nolint:gocritic // function signature required by Terraform - - // skip initial empty configuration to avoid follow-up errors - if req.Config.Raw.IsNull() { - return - } - var configModel sqlserverflexalpha2.InstanceModel - resp.Diagnostics.Append(req.Config.Get(ctx, &configModel)...) - if resp.Diagnostics.HasError() { - return - } - - if req.Plan.Raw.IsNull() { - return - } - var planModel sqlserverflexalpha2.InstanceModel - resp.Diagnostics.Append(req.Plan.Get(ctx, &planModel)...) - if resp.Diagnostics.HasError() { - return - } - - utils.AdaptRegion(ctx, configModel.Region, &planModel.Region, r.providerData.GetRegion(), resp) - if resp.Diagnostics.HasError() { - return - } - - var identityModel InstanceResourceIdentityModel - identityModel.ProjectID = planModel.ProjectId - identityModel.Region = planModel.Region - if !planModel.InstanceId.IsNull() && !planModel.InstanceId.IsUnknown() { - identityModel.InstanceID = planModel.InstanceId - } - - resp.Diagnostics.Append(resp.Identity.Set(ctx, identityModel)...) - if resp.Diagnostics.HasError() { - return - } - - resp.Diagnostics.Append(resp.Plan.Set(ctx, planModel)...) - if resp.Diagnostics.HasError() { - return - } + resp.TypeName = req.ProviderTypeName + "_sqlserverflexalpha_instance" } //go:embed planModifiers.yaml var modifiersFileByte []byte -// Schema defines the schema for the resource. -func (r *instanceResource) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) { - schema := sqlserverflexalpha2.InstanceResourceSchema(ctx) +func (r *instanceResource) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) { + s := sqlserverflexalphaResGen.InstanceResourceSchema(ctx) fields, err := utils.ReadModifiersConfig(modifiersFileByte) if err != nil { @@ -152,12 +73,12 @@ func (r *instanceResource) Schema(ctx context.Context, _ resource.SchemaRequest, return } - err = utils.AddPlanModifiersToResourceSchema(fields, &schema) + err = utils.AddPlanModifiersToResourceSchema(fields, &s) if err != nil { resp.Diagnostics.AddError("error adding plan modifiers", err.Error()) return } - resp.Schema = schema + resp.Schema = s } func (r *instanceResource) IdentitySchema( @@ -180,15 +101,355 @@ func (r *instanceResource) IdentitySchema( } } -// Create creates the resource and sets the initial Terraform state. -func (r *instanceResource) Create( +// Configure adds the provider configured client to the resource. +func (r *instanceResource) Configure( ctx context.Context, - req resource.CreateRequest, - resp *resource.CreateResponse, + req resource.ConfigureRequest, + resp *resource.ConfigureResponse, +) { + var ok bool + r.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics) + if !ok { + return + } + + apiClientConfigOptions := []config.ConfigurationOption{ + config.WithCustomAuth(r.providerData.RoundTripper), + utils.UserAgentConfigOption(r.providerData.Version), + } + if r.providerData.SQLServerFlexCustomEndpoint != "" { + apiClientConfigOptions = append( + apiClientConfigOptions, + config.WithEndpoint(r.providerData.SQLServerFlexCustomEndpoint), + ) + } else { + apiClientConfigOptions = append(apiClientConfigOptions, config.WithRegion(r.providerData.GetRegion())) + } + apiClient, err := sqlserverflexalpha.NewAPIClient(apiClientConfigOptions...) + if err != nil { + resp.Diagnostics.AddError( + "Error configuring API client", + fmt.Sprintf( + "Configuring client: %v. This is an error related to the provider configuration, not to the resource configuration", + err, + ), + ) + return + } + r.client = apiClient + tflog.Info(ctx, "sqlserverflexalpha.Instance client configured") +} + +// ModifyPlan implements resource.ResourceWithModifyPlan. +// Use the modifier to set the effective region in the current plan. +func (r *instanceResource) ModifyPlan( + ctx context.Context, + req resource.ModifyPlanRequest, + resp *resource.ModifyPlanResponse, ) { // nolint:gocritic // function signature required by Terraform - var model resourceModel - diags := req.Plan.Get(ctx, &model) - resp.Diagnostics.Append(diags...) + + // skip initial empty configuration to avoid follow-up errors + if req.Config.Raw.IsNull() { + return + } + var configModel resourceModel + resp.Diagnostics.Append(req.Config.Get(ctx, &configModel)...) + if resp.Diagnostics.HasError() { + return + } + + if req.Plan.Raw.IsNull() { + return + } + var planModel resourceModel + resp.Diagnostics.Append(req.Plan.Get(ctx, &planModel)...) + if resp.Diagnostics.HasError() { + return + } + + utils.AdaptRegion(ctx, configModel.Region, &planModel.Region, r.providerData.GetRegion(), resp) + if resp.Diagnostics.HasError() { + return + } + + resp.Diagnostics.Append(resp.Plan.Set(ctx, planModel)...) + if resp.Diagnostics.HasError() { + return + } +} + +func (r *instanceResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) { + var data resourceModel + crateErr := "[SQL Server Flex BETA - Create] error" + + // Read Terraform plan data into the model + resp.Diagnostics.Append(req.Plan.Get(ctx, &data)...) + + if resp.Diagnostics.HasError() { + return + } + + ctx = core.InitProviderContext(ctx) + + projectId := data.ProjectId.ValueString() + region := data.Region.ValueString() + ctx = tflog.SetField(ctx, "project_id", projectId) + ctx = tflog.SetField(ctx, "region", region) + + // Generate API request body from model + payload, err := toCreatePayload(ctx, &data) + if err != nil { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + crateErr, + fmt.Sprintf("Creating API payload: %v", err), + ) + return + } + // Create new Instance + createResp, err := r.client.CreateInstanceRequest( + ctx, + projectId, + region, + ).CreateInstanceRequestPayload(*payload).Execute() + if err != nil { + core.LogAndAddError(ctx, &resp.Diagnostics, crateErr, fmt.Sprintf("Calling API: %v", err)) + return + } + + ctx = core.LogResponse(ctx) + + InstanceId := *createResp.Id + + // Example data value setting + data.InstanceId = types.StringValue("id-from-response") + + identity := InstanceResourceIdentityModel{ + ProjectID: types.StringValue(projectId), + Region: types.StringValue(region), + InstanceID: types.StringValue(InstanceId), + } + resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...) + if resp.Diagnostics.HasError() { + return + } + + waitResp, err := wait.CreateInstanceWaitHandler( + ctx, + r.client, + projectId, + InstanceId, + region, + ).SetSleepBeforeWait( + 10 * time.Second, + ).SetTimeout( + 90 * time.Minute, + ).WaitWithContext(ctx) + if err != nil { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + crateErr, + fmt.Sprintf("Instance creation waiting: %v", err), + ) + return + } + + if waitResp.Id == nil { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + crateErr, + "Instance creation waiting: returned id is nil", + ) + return + } + + // Map response body to schema + err = mapResponseToModel(ctx, waitResp, &data, resp.Diagnostics) + if err != nil { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + crateErr, + fmt.Sprintf("processing API payload: %v", err), + ) + return + } + + // Save data into Terraform state + resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) + + tflog.Info(ctx, "sqlserverflexalpha.Instance created") +} + +func (r *instanceResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) { + var data resourceModel + + // Read Terraform prior state data into the model + resp.Diagnostics.Append(req.State.Get(ctx, &data)...) + if resp.Diagnostics.HasError() { + return + } + + // Read identity data + var identityData InstanceResourceIdentityModel + resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...) + if resp.Diagnostics.HasError() { + return + } + + ctx = core.InitProviderContext(ctx) + + projectId := data.ProjectId.ValueString() + region := data.Region.ValueString() + ctx = tflog.SetField(ctx, "project_id", projectId) + ctx = tflog.SetField(ctx, "region", region) + + instanceId := data.InstanceId.ValueString() + ctx = tflog.SetField(ctx, "instance_id", instanceId) + + instanceResp, err := r.client.GetInstanceRequest(ctx, projectId, region, instanceId).Execute() + if err != nil { + oapiErr, ok := err.(*oapierror.GenericOpenAPIError) //nolint:errorlint //complaining that error.As should be used to catch wrapped errors, but this error should not be wrapped + if ok && oapiErr.StatusCode == http.StatusNotFound { + resp.State.RemoveResource(ctx) + return + } + core.LogAndAddError(ctx, &resp.Diagnostics, "Error reading instance", err.Error()) + return + } + + ctx = core.LogResponse(ctx) + + // Map response body to schema + err = mapResponseToModel(ctx, instanceResp, &data, resp.Diagnostics) + if err != nil { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + "Error reading instance", + fmt.Sprintf("Processing API payload: %v", err), + ) + return + } + + // Save identity into Terraform state + identity := InstanceResourceIdentityModel{ + ProjectID: types.StringValue(projectId), + Region: types.StringValue(region), + InstanceID: types.StringValue(instanceId), + } + resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...) + if resp.Diagnostics.HasError() { + return + } + + // Save updated data into Terraform state + resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) + if resp.Diagnostics.HasError() { + return + } + + tflog.Info(ctx, "sqlserverflexalpha.Instance read") +} + +func (r *instanceResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) { + var data resourceModel + updateInstanceError := "Error updating instance" + + resp.Diagnostics.Append(req.Plan.Get(ctx, &data)...) + if resp.Diagnostics.HasError() { + return + } + + ctx = core.InitProviderContext(ctx) + + projectId := data.ProjectId.ValueString() + region := data.Region.ValueString() + ctx = tflog.SetField(ctx, "project_id", projectId) + ctx = tflog.SetField(ctx, "region", region) + + instanceId := data.InstanceId.ValueString() + ctx = tflog.SetField(ctx, "instance_id", instanceId) + + // Generate API request body from model + payload, err := toUpdatePayload(ctx, &data, resp) + if err != nil { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + updateInstanceError, + fmt.Sprintf("Creating API payload: %v", err), + ) + return + } + // Update existing instance + err = r.client.UpdateInstanceRequest( + ctx, + projectId, + region, + instanceId, + ).UpdateInstanceRequestPayload(*payload).Execute() + if err != nil { + core.LogAndAddError(ctx, &resp.Diagnostics, updateInstanceError, err.Error()) + return + } + + ctx = core.LogResponse(ctx) + + waitResp, err := wait. + UpdateInstanceWaitHandler(ctx, r.client, projectId, instanceId, region). + SetSleepBeforeWait(15 * time.Second). + SetTimeout(45 * time.Minute). + WaitWithContext(ctx) + if err != nil { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + updateInstanceError, + fmt.Sprintf("Instance update waiting: %v", err), + ) + return + } + + // Map response body to schema + err = mapResponseToModel(ctx, waitResp, &data, resp.Diagnostics) + if err != nil { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + updateInstanceError, + fmt.Sprintf("Processing API payload: %v", err), + ) + return + } + + identity := InstanceResourceIdentityModel{ + ProjectID: types.StringValue(projectId), + Region: types.StringValue(region), + InstanceID: types.StringValue(instanceId), + } + resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...) + if resp.Diagnostics.HasError() { + return + } + + // Save updated data into Terraform state + resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) + if resp.Diagnostics.HasError() { + return + } + + tflog.Info(ctx, "sqlserverflexalpha.Instance updated") +} + +func (r *instanceResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) { + var data resourceModel + + // Read Terraform prior state data into the model + resp.Diagnostics.Append(req.State.Get(ctx, &data)...) if resp.Diagnostics.HasError() { return } @@ -207,284 +468,8 @@ func (r *instanceResource) Create( ctx = tflog.SetField(ctx, "project_id", projectId) ctx = tflog.SetField(ctx, "region", region) - // Generate API request body from model - payload, err := toCreatePayload(ctx, &model) - if err != nil { - core.LogAndAddError( - ctx, - &resp.Diagnostics, - "Error creating instance", - fmt.Sprintf("Creating API payload: %v", err), - ) - return - } - // Create new instance - createResp, err := r.client.CreateInstanceRequest( - ctx, - projectId, - region, - ).CreateInstanceRequestPayload(*payload).Execute() - if err != nil { - core.LogAndAddError(ctx, &resp.Diagnostics, "Error creating instance", fmt.Sprintf("Calling API: %v", err)) - return - } - - ctx = core.LogResponse(ctx) - - instanceId := *createResp.Id - - // Set data returned by API in identity - identity := InstanceResourceIdentityModel{ - ProjectID: types.StringValue(projectId), - Region: types.StringValue(region), - InstanceID: types.StringValue(instanceId), - } - resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...) - if resp.Diagnostics.HasError() { - return - } - - utils.SetAndLogStateFields( - ctx, &resp.Diagnostics, &resp.State, map[string]any{ - "id": utils.BuildInternalTerraformId(projectId, region, instanceId), - "instance_id": instanceId, - }, - ) - if resp.Diagnostics.HasError() { - return - } - - // The creation waiter sometimes returns an error from the API: "instance with id xxx has unexpected status Failure" - // which can be avoided by sleeping before wait - waitResp, err := wait.CreateInstanceWaitHandler( - ctx, - r.client, - projectId, - instanceId, - region, - ).SetSleepBeforeWait( - 30 * time.Second, - ).SetTimeout( - 90 * time.Minute, - ).WaitWithContext(ctx) - if err != nil { - core.LogAndAddError( - ctx, - &resp.Diagnostics, - "Error creating instance", - fmt.Sprintf("Instance creation waiting: %v", err), - ) - return - } - - if waitResp.FlavorId == nil { - core.LogAndAddError( - ctx, - &resp.Diagnostics, - "Error creating instance", - "Instance creation waiting: returned flavor id is nil", - ) - return - } - - // Map response body to schema - // err = mapFields(ctx, waitResp, &model, storage, encryption, network, region) - err = mapFields(ctx, waitResp, &model, resp.Diagnostics) - if err != nil { - core.LogAndAddError( - ctx, - &resp.Diagnostics, - "Error creating instance", - fmt.Sprintf("Processing API payload: %v", err), - ) - return - } - // Set state to fully populated data - diags = resp.State.Set(ctx, model) - resp.Diagnostics.Append(diags...) - if resp.Diagnostics.HasError() { - return - } - - tflog.Info(ctx, "SQLServer Flex instance created") -} - -// Read refreshes the Terraform state with the latest data. -func (r *instanceResource) Read( - ctx context.Context, - req resource.ReadRequest, - resp *resource.ReadResponse, -) { // nolint:gocritic // function signature required by Terraform - var model resourceModel - diags := req.State.Get(ctx, &model) - resp.Diagnostics.Append(diags...) - if resp.Diagnostics.HasError() { - return - } - - // Read identity data - var identityData InstanceResourceIdentityModel - resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...) - if resp.Diagnostics.HasError() { - return - } - - ctx = core.InitProviderContext(ctx) - - projectId := model.ProjectId.ValueString() - instanceId := model.InstanceId.ValueString() - region := r.providerData.GetRegionWithOverride(model.Region) - - ctx = tflog.SetField(ctx, "project_id", projectId) + instanceId := identityData.InstanceID.ValueString() ctx = tflog.SetField(ctx, "instance_id", instanceId) - ctx = tflog.SetField(ctx, "region", region) - - instanceResp, err := r.client.GetInstanceRequest(ctx, projectId, region, instanceId).Execute() - if err != nil { - oapiErr, ok := err.(*oapierror.GenericOpenAPIError) //nolint:errorlint //complaining that error.As should be used to catch wrapped errors, but this error should not be wrapped - if ok && oapiErr.StatusCode == http.StatusNotFound { - resp.State.RemoveResource(ctx) - return - } - core.LogAndAddError(ctx, &resp.Diagnostics, "Error reading instance", err.Error()) - return - } - - ctx = core.LogResponse(ctx) - - // Map response body to schema - err = mapFields(ctx, instanceResp, &model, resp.Diagnostics) - if err != nil { - core.LogAndAddError( - ctx, - &resp.Diagnostics, - "Error reading instance", - fmt.Sprintf("Processing API payload: %v", err), - ) - return - } - // Set refreshed state - diags = resp.State.Set(ctx, model) - resp.Diagnostics.Append(diags...) - if resp.Diagnostics.HasError() { - return - } - - // Set data returned by API in identity - identity := InstanceResourceIdentityModel{ - ProjectID: types.StringValue(projectId), - Region: types.StringValue(region), - InstanceID: types.StringValue(instanceId), - } - resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...) - if resp.Diagnostics.HasError() { - return - } - tflog.Info(ctx, "SQLServer Flex instance read") -} - -// Update updates the resource and sets the updated Terraform state on success. -func (r *instanceResource) Update( - ctx context.Context, - req resource.UpdateRequest, - resp *resource.UpdateResponse, -) { // nolint:gocritic // function signature required by Terraform - // Retrieve values from plan - var model resourceModel - diags := req.Plan.Get(ctx, &model) - resp.Diagnostics.Append(diags...) - if resp.Diagnostics.HasError() { - return - } - - ctx = core.InitProviderContext(ctx) - - projectId := model.ProjectId.ValueString() - instanceId := model.InstanceId.ValueString() - region := model.Region.ValueString() - - ctx = tflog.SetField(ctx, "project_id", projectId) - ctx = tflog.SetField(ctx, "instance_id", instanceId) - ctx = tflog.SetField(ctx, "region", region) - - // Generate API request body from model - payload, err := toUpdatePayload(ctx, &model, resp) - if err != nil { - core.LogAndAddError( - ctx, - &resp.Diagnostics, - "Error updating instance", - fmt.Sprintf("Creating API payload: %v", err), - ) - return - } - // Update existing instance - err = r.client.UpdateInstanceRequest( - ctx, - projectId, - region, - instanceId, - ).UpdateInstanceRequestPayload(*payload).Execute() - if err != nil { - core.LogAndAddError(ctx, &resp.Diagnostics, "Error updating instance", err.Error()) - return - } - - ctx = core.LogResponse(ctx) - - waitResp, err := wait.UpdateInstanceWaitHandler(ctx, r.client, projectId, instanceId, region).WaitWithContext(ctx) - if err != nil { - core.LogAndAddError( - ctx, - &resp.Diagnostics, - "Error updating instance", - fmt.Sprintf("Instance update waiting: %v", err), - ) - return - } - - // Map response body to schema - err = mapFields(ctx, waitResp, &model, resp.Diagnostics) - // err = mapFields(ctx, waitResp, &model, storage, encryption, network, region) - if err != nil { - core.LogAndAddError( - ctx, - &resp.Diagnostics, - "Error updating instance", - fmt.Sprintf("Processing API payload: %v", err), - ) - return - } - diags = resp.State.Set(ctx, model) - resp.Diagnostics.Append(diags...) - if resp.Diagnostics.HasError() { - return - } - tflog.Info(ctx, "SQLServer Flex instance updated") -} - -// Delete deletes the resource and removes the Terraform state on success. -func (r *instanceResource) Delete( - ctx context.Context, - req resource.DeleteRequest, - resp *resource.DeleteResponse, -) { // nolint:gocritic // function signature required by Terraform - // Retrieve values from state - var model resourceModel - diags := req.State.Get(ctx, &model) - resp.Diagnostics.Append(diags...) - if resp.Diagnostics.HasError() { - return - } - - ctx = core.InitProviderContext(ctx) - - projectId := model.ProjectId.ValueString() - instanceId := model.InstanceId.ValueString() - region := model.Region.ValueString() - ctx = tflog.SetField(ctx, "project_id", projectId) - ctx = tflog.SetField(ctx, "instance_id", instanceId) - ctx = tflog.SetField(ctx, "region", region) // Delete existing instance err := r.client.DeleteInstanceRequest(ctx, projectId, region, instanceId).Execute() @@ -495,7 +480,7 @@ func (r *instanceResource) Delete( ctx = core.LogResponse(ctx) - _, err = wait.DeleteInstanceWaitHandler(ctx, r.client, projectId, instanceId, region).WaitWithContext(ctx) + delResp, err := wait.DeleteInstanceWaitHandler(ctx, r.client, projectId, instanceId, region).WaitWithContext(ctx) if err != nil { core.LogAndAddError( ctx, @@ -505,16 +490,31 @@ func (r *instanceResource) Delete( ) return } - tflog.Info(ctx, "SQLServer Flex instance deleted") + + if delResp != nil { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + "Error deleting instance", + "wait handler returned non nil result", + ) + return + } + + resp.State.RemoveResource(ctx) + + tflog.Info(ctx, "sqlserverflexalpha.Instance deleted") } // ImportState imports a resource into the Terraform state on success. -// The expected format of the resource import identifier is: project_id,instance_id +// The expected format of the resource import identifier is: project_id,zone_id,record_set_id func (r *instanceResource) ImportState( ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse, ) { + ctx = core.InitProviderContext(ctx) + if req.ID != "" { idParts := strings.Split(req.ID, core.Separator) @@ -551,5 +551,5 @@ func (r *instanceResource) ImportState( resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), region)...) resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), instanceId)...) - tflog.Info(ctx, "SQLServer Flex instance state imported") + tflog.Info(ctx, "sqlserverflexalpha instance state imported") } diff --git a/stackit/internal/services/sqlserverflexalpha/instance/resource_test.go b/stackit/internal/services/sqlserverflexalpha/instance/resource_test.go deleted file mode 100644 index bc9f6d3a..00000000 --- a/stackit/internal/services/sqlserverflexalpha/instance/resource_test.go +++ /dev/null @@ -1,823 +0,0 @@ -package sqlserverflexalpha - -// type sqlserverflexClientMocked struct { -// returnError bool -// listFlavorsResp *sqlserverflex.GetFlavorsResponse -// } -// -// func (c *sqlserverflexClientMocked) GetFlavorsExecute(_ context.Context, _, _ string) (*sqlserverflex.GetFlavorsResponse, error) { -// if c.returnError { -// return nil, fmt.Errorf("get flavors failed") -// } -// -// return c.listFlavorsResp, nil -// } - -// func TestMapFields(t *testing.T) { -// t.Skip("Skipping - needs refactoring") -// const testRegion = "region" -// tests := []struct { -// description string -// state Model -// input *sqlserverflex.GetInstanceResponse -// storage *storageModel -// encryption *encryptionModel -// network *networkModel -// region string -// expected Model -// isValid bool -// }{ -// { -// "default_values", -// Model{ -// InstanceId: types.StringValue("iid"), -// ProjectId: types.StringValue("pid"), -// Replicas: types.Int64Value(1), -// RetentionDays: types.Int64Value(1), -// Version: types.StringValue("v1"), -// Edition: types.StringValue("edition 1"), -// Status: types.StringValue("status"), -// IsDeletable: types.BoolValue(true), -// }, -// &sqlserverflex.GetInstanceResponse{ -// FlavorId: utils.Ptr("flavor_id"), -// Replicas: sqlserverflex.GetInstanceResponseGetReplicasAttributeType(utils.Ptr(int32(1))), -// RetentionDays: utils.Ptr(int64(1)), -// Version: sqlserverflex.GetInstanceResponseGetVersionAttributeType(utils.Ptr("v1")), -// Edition: sqlserverflex.GetInstanceResponseGetEditionAttributeType(utils.Ptr("edition 1")), -// Status: sqlserverflex.GetInstanceResponseGetStatusAttributeType(utils.Ptr("status")), -// IsDeletable: utils.Ptr(true), -// }, -// &storageModel{}, -// &encryptionModel{}, -// &networkModel{ -// ACL: types.ListNull(basetypes.StringType{}), -// }, -// testRegion, -// Model{ -// Id: types.StringValue("pid,region,iid"), -// InstanceId: types.StringValue("iid"), -// ProjectId: types.StringValue("pid"), -// Name: types.StringNull(), -// BackupSchedule: types.StringNull(), -// Replicas: types.Int64Value(1), -// Storage: types.ObjectValueMust(storageTypes, map[string]attr.Value{ -// "class": types.StringNull(), -// "size": types.Int64Null(), -// }), -// Encryption: types.ObjectValueMust(encryptionTypes, map[string]attr.Value{ -// "keyring_id": types.StringNull(), -// "key_id": types.StringNull(), -// "key_version": types.StringNull(), -// "service_account": types.StringNull(), -// }), -// Network: types.ObjectValueMust(networkTypes, map[string]attr.Value{ -// "acl": types.ListNull(types.StringType), -// "access_scope": types.StringNull(), -// "instance_address": types.StringNull(), -// "router_address": types.StringNull(), -// }), -// IsDeletable: types.BoolValue(true), -// Edition: types.StringValue("edition 1"), -// Status: types.StringValue("status"), -// RetentionDays: types.Int64Value(1), -// Version: types.StringValue("v1"), -// Region: types.StringValue(testRegion), -// }, -// true, -// }, -// { -// "simple_values", -// Model{ -// InstanceId: types.StringValue("iid"), -// ProjectId: types.StringValue("pid"), -// }, -// &sqlserverflex.GetInstanceResponse{ -// BackupSchedule: utils.Ptr("schedule"), -// FlavorId: utils.Ptr("flavor_id"), -// Id: utils.Ptr("iid"), -// Name: utils.Ptr("name"), -// Replicas: sqlserverflex.GetInstanceResponseGetReplicasAttributeType(utils.Ptr(int32(56))), -// Status: sqlserverflex.GetInstanceResponseGetStatusAttributeType(utils.Ptr("status")), -// Storage: &sqlserverflex.Storage{ -// Class: utils.Ptr("class"), -// Size: utils.Ptr(int64(78)), -// }, -// Edition: sqlserverflex.GetInstanceResponseGetEditionAttributeType(utils.Ptr("edition")), -// RetentionDays: utils.Ptr(int64(1)), -// Version: sqlserverflex.GetInstanceResponseGetVersionAttributeType(utils.Ptr("version")), -// IsDeletable: utils.Ptr(true), -// Encryption: nil, -// Network: &sqlserverflex.InstanceNetwork{ -// AccessScope: nil, -// Acl: &[]string{ -// "ip1", -// "ip2", -// "", -// }, -// InstanceAddress: nil, -// RouterAddress: nil, -// }, -// }, -// &storageModel{}, -// &encryptionModel{}, -// &networkModel{ -// ACL: types.ListValueMust(basetypes.StringType{}, []attr.Value{ -// types.StringValue("ip1"), -// types.StringValue("ip2"), -// types.StringValue(""), -// }), -// }, -// testRegion, -// Model{ -// Id: types.StringValue("pid,region,iid"), -// InstanceId: types.StringValue("iid"), -// ProjectId: types.StringValue("pid"), -// Name: types.StringValue("name"), -// BackupSchedule: types.StringValue("schedule"), -// Replicas: types.Int64Value(56), -// Storage: types.ObjectValueMust(storageTypes, map[string]attr.Value{ -// "class": types.StringValue("class"), -// "size": types.Int64Value(78), -// }), -// Network: types.ObjectValueMust(networkTypes, map[string]attr.Value{ -// "acl": types.ListValueMust(types.StringType, []attr.Value{ -// types.StringValue("ip1"), -// types.StringValue("ip2"), -// types.StringValue(""), -// }), -// "access_scope": types.StringNull(), -// "instance_address": types.StringNull(), -// "router_address": types.StringNull(), -// }), -// Edition: types.StringValue("edition"), -// RetentionDays: types.Int64Value(1), -// Version: types.StringValue("version"), -// Region: types.StringValue(testRegion), -// IsDeletable: types.BoolValue(true), -// Encryption: types.ObjectValueMust(encryptionTypes, map[string]attr.Value{ -// "keyring_id": types.StringNull(), -// "key_id": types.StringNull(), -// "key_version": types.StringNull(), -// "service_account": types.StringNull(), -// }), -// Status: types.StringValue("status"), -// }, -// true, -// }, -// // { -// // "simple_values_no_flavor_and_storage", -// // Model{ -// // InstanceId: types.StringValue("iid"), -// // ProjectId: types.StringValue("pid"), -// // }, -// // &sqlserverflex.GetInstanceResponse{ -// // Acl: &[]string{ -// // "ip1", -// // "ip2", -// // "", -// // }, -// // BackupSchedule: utils.Ptr("schedule"), -// // FlavorId: nil, -// // Id: utils.Ptr("iid"), -// // Name: utils.Ptr("name"), -// // Replicas: sqlserverflex.GetInstanceResponseGetReplicasAttributeType(utils.Ptr(int32(56))), -// // Status: sqlserverflex.GetInstanceResponseGetStatusAttributeType(utils.Ptr("status")), -// // Storage: nil, -// // Edition: sqlserverflex.GetInstanceResponseGetEditionAttributeType(utils.Ptr("edition")), -// // RetentionDays: utils.Ptr(int64(1)), -// // Version: sqlserverflex.GetInstanceResponseGetVersionAttributeType(utils.Ptr("version")), -// // }, -// // &flavorModel{ -// // CPU: types.Int64Value(12), -// // RAM: types.Int64Value(34), -// // }, -// // &storageModel{ -// // Class: types.StringValue("class"), -// // Size: types.Int64Value(78), -// // }, -// // &optionsModel{ -// // Edition: types.StringValue("edition"), -// // RetentionDays: types.Int64Value(1), -// // }, -// // testRegion, -// // Model{ -// // Id: types.StringValue("pid,region,iid"), -// // InstanceId: types.StringValue("iid"), -// // ProjectId: types.StringValue("pid"), -// // Name: types.StringValue("name"), -// // ACL: types.ListValueMust(types.StringType, []attr.Value{ -// // types.StringValue("ip1"), -// // types.StringValue("ip2"), -// // types.StringValue(""), -// // }), -// // BackupSchedule: types.StringValue("schedule"), -// // Flavor: types.ObjectValueMust(flavorTypes, map[string]attr.Value{ -// // "id": types.StringNull(), -// // "description": types.StringNull(), -// // "cpu": types.Int64Value(12), -// // "ram": types.Int64Value(34), -// // }), -// // Replicas: types.Int64Value(56), -// // Storage: types.ObjectValueMust(storageTypes, map[string]attr.Value{ -// // "class": types.StringValue("class"), -// // "size": types.Int64Value(78), -// // }), -// // Options: types.ObjectValueMust(optionsTypes, map[string]attr.Value{ -// // "edition": types.StringValue("edition"), -// // "retention_days": types.Int64Value(1), -// // }), -// // Version: types.StringValue("version"), -// // Region: types.StringValue(testRegion), -// // }, -// // true, -// // }, -// // { -// // "acls_unordered", -// // Model{ -// // InstanceId: types.StringValue("iid"), -// // ProjectId: types.StringValue("pid"), -// // ACL: types.ListValueMust(types.StringType, []attr.Value{ -// // types.StringValue("ip2"), -// // types.StringValue(""), -// // types.StringValue("ip1"), -// // }), -// // }, -// // &sqlserverflex.GetInstanceResponse{ -// // Acl: &[]string{ -// // "", -// // "ip1", -// // "ip2", -// // }, -// // BackupSchedule: utils.Ptr("schedule"), -// // FlavorId: nil, -// // Id: utils.Ptr("iid"), -// // Name: utils.Ptr("name"), -// // Replicas: sqlserverflex.GetInstanceResponseGetReplicasAttributeType(utils.Ptr(int32(56))), -// // Status: sqlserverflex.GetInstanceResponseGetStatusAttributeType(utils.Ptr("status")), -// // Storage: nil, -// // //Options: &map[string]string{ -// // // "edition": "edition", -// // // "retentionDays": "1", -// // //}, -// // Version: sqlserverflex.GetInstanceResponseGetVersionAttributeType(utils.Ptr("version")), -// // }, -// // &flavorModel{ -// // CPU: types.Int64Value(12), -// // RAM: types.Int64Value(34), -// // }, -// // &storageModel{ -// // Class: types.StringValue("class"), -// // Size: types.Int64Value(78), -// // }, -// // &optionsModel{}, -// // testRegion, -// // Model{ -// // Id: types.StringValue("pid,region,iid"), -// // InstanceId: types.StringValue("iid"), -// // ProjectId: types.StringValue("pid"), -// // Name: types.StringValue("name"), -// // ACL: types.ListValueMust(types.StringType, []attr.Value{ -// // types.StringValue("ip2"), -// // types.StringValue(""), -// // types.StringValue("ip1"), -// // }), -// // BackupSchedule: types.StringValue("schedule"), -// // Flavor: types.ObjectValueMust(flavorTypes, map[string]attr.Value{ -// // "id": types.StringNull(), -// // "description": types.StringNull(), -// // "cpu": types.Int64Value(12), -// // "ram": types.Int64Value(34), -// // }), -// // Replicas: types.Int64Value(56), -// // Storage: types.ObjectValueMust(storageTypes, map[string]attr.Value{ -// // "class": types.StringValue("class"), -// // "size": types.Int64Value(78), -// // }), -// // Options: types.ObjectValueMust(optionsTypes, map[string]attr.Value{ -// // "edition": types.StringValue("edition"), -// // "retention_days": types.Int64Value(1), -// // }), -// // Version: types.StringValue("version"), -// // Region: types.StringValue(testRegion), -// // }, -// // true, -// // }, -// // { -// // "nil_response", -// // Model{ -// // InstanceId: types.StringValue("iid"), -// // ProjectId: types.StringValue("pid"), -// // }, -// // nil, -// // &flavorModel{}, -// // &storageModel{}, -// // &optionsModel{}, -// // testRegion, -// // Model{}, -// // false, -// // }, -// // { -// // "no_resource_id", -// // Model{ -// // InstanceId: types.StringValue("iid"), -// // ProjectId: types.StringValue("pid"), -// // }, -// // &sqlserverflex.GetInstanceResponse{}, -// // &flavorModel{}, -// // &storageModel{}, -// // &optionsModel{}, -// // testRegion, -// // Model{}, -// // false, -// // }, -// } -// for _, tt := range tests { -// t.Run(tt.description, func(t *testing.T) { -// err := mapFields(context.Background(), tt.input, &tt.state, tt.storage, tt.encryption, tt.network, tt.region) -// if !tt.isValid && err == nil { -// t.Fatalf("Should have failed") -// } -// if tt.isValid && err != nil { -// t.Fatalf("Should not have failed: %v", err) -// } -// if tt.isValid { -// diff := cmp.Diff(tt.state, tt.expected) -// if diff != "" { -// t.Fatalf("Data does not match: %s", diff) -// } -// } -// }) -// } -//} - -// func TestToCreatePayload(t *testing.T) { -// tests := []struct { -// description string -// input *Model -// inputAcl []string -// inputFlavor *flavorModel -// inputStorage *storageModel -// inputOptions *optionsModel -// expected *sqlserverflex.CreateInstanceRequestPayload -// isValid bool -// }{ -// { -// "default_values", -// &Model{}, -// []string{}, -// &flavorModel{}, -// &storageModel{}, -// &optionsModel{}, -// &sqlserverflex.CreateInstanceRequestPayload{ -// Acl: &sqlserverflex.CreateInstanceRequestPayloadGetAclArgType{}, -// Storage: &sqlserverflex.CreateInstanceRequestPayloadGetStorageArgType{}, -// }, -// true, -// }, -// { -// "simple_values", -// &Model{ -// BackupSchedule: types.StringValue("schedule"), -// Name: types.StringValue("name"), -// Replicas: types.Int64Value(12), -// Version: types.StringValue("version"), -// }, -// []string{ -// "ip_1", -// "ip_2", -// }, -// &flavorModel{ -// Id: types.StringValue("flavor_id"), -// }, -// &storageModel{ -// Class: types.StringValue("class"), -// Size: types.Int64Value(34), -// }, -// &optionsModel{ -// Edition: types.StringValue("edition"), -// RetentionDays: types.Int64Value(1), -// }, -// &sqlserverflex.CreateInstancePayload{ -// Acl: &sqlserverflex.CreateInstancePayloadAcl{ -// Items: &[]string{ -// "ip_1", -// "ip_2", -// }, -// }, -// BackupSchedule: utils.Ptr("schedule"), -// FlavorId: utils.Ptr("flavor_id"), -// Name: utils.Ptr("name"), -// Storage: &sqlserverflex.CreateInstancePayloadStorage{ -// Class: utils.Ptr("class"), -// Size: utils.Ptr(int64(34)), -// }, -// Options: &sqlserverflex.CreateInstancePayloadOptions{ -// Edition: utils.Ptr("edition"), -// RetentionDays: utils.Ptr("1"), -// }, -// Version: utils.Ptr("version"), -// }, -// true, -// }, -// { -// "null_fields_and_int_conversions", -// &Model{ -// BackupSchedule: types.StringNull(), -// Name: types.StringNull(), -// Replicas: types.Int64Value(2123456789), -// Version: types.StringNull(), -// }, -// []string{ -// "", -// }, -// &flavorModel{ -// Id: types.StringNull(), -// }, -// &storageModel{ -// Class: types.StringNull(), -// Size: types.Int64Null(), -// }, -// &optionsModel{ -// Edition: types.StringNull(), -// RetentionDays: types.Int64Null(), -// }, -// &sqlserverflex.CreateInstancePayload{ -// Acl: &sqlserverflex.CreateInstancePayloadAcl{ -// Items: &[]string{ -// "", -// }, -// }, -// BackupSchedule: nil, -// FlavorId: nil, -// Name: nil, -// Storage: &sqlserverflex.CreateInstancePayloadStorage{ -// Class: nil, -// Size: nil, -// }, -// Options: &sqlserverflex.CreateInstancePayloadOptions{}, -// Version: nil, -// }, -// true, -// }, -// { -// "nil_model", -// nil, -// []string{}, -// &flavorModel{}, -// &storageModel{}, -// &optionsModel{}, -// nil, -// false, -// }, -// { -// "nil_acl", -// &Model{}, -// nil, -// &flavorModel{}, -// &storageModel{}, -// &optionsModel{}, -// &sqlserverflex.CreateInstancePayload{ -// Acl: &sqlserverflex.CreateInstancePayloadAcl{}, -// Storage: &sqlserverflex.CreateInstancePayloadStorage{}, -// Options: &sqlserverflex.CreateInstancePayloadOptions{}, -// }, -// true, -// }, -// { -// "nil_flavor", -// &Model{}, -// []string{}, -// nil, -// &storageModel{}, -// &optionsModel{}, -// nil, -// false, -// }, -// { -// "nil_storage", -// &Model{}, -// []string{}, -// &flavorModel{}, -// nil, -// &optionsModel{}, -// &sqlserverflex.CreateInstancePayload{ -// Acl: &sqlserverflex.CreateInstancePayloadAcl{ -// Items: &[]string{}, -// }, -// Storage: &sqlserverflex.CreateInstancePayloadStorage{}, -// Options: &sqlserverflex.CreateInstancePayloadOptions{}, -// }, -// true, -// }, -// { -// "nil_options", -// &Model{}, -// []string{}, -// &flavorModel{}, -// &storageModel{}, -// nil, -// &sqlserverflex.CreateInstancePayload{ -// Acl: &sqlserverflex.CreateInstancePayloadAcl{ -// Items: &[]string{}, -// }, -// Storage: &sqlserverflex.CreateInstancePayloadStorage{}, -// Options: &sqlserverflex.CreateInstancePayloadOptions{}, -// }, -// true, -// }, -// } -// for _, tt := range tests { -// t.Run(tt.description, func(t *testing.T) { -// output, err := toCreatePayload(tt.input, tt.inputAcl, tt.inputFlavor, tt.inputStorage, tt.inputOptions) -// if !tt.isValid && err == nil { -// t.Fatalf("Should have failed") -// } -// if tt.isValid && err != nil { -// t.Fatalf("Should not have failed: %v", err) -// } -// if tt.isValid { -// diff := cmp.Diff(output, tt.expected) -// if diff != "" { -// t.Fatalf("Data does not match: %s", diff) -// } -// } -// }) -// } -// } -// -// func TestToUpdatePayload(t *testing.T) { -// tests := []struct { -// description string -// input *Model -// inputAcl []string -// inputFlavor *flavorModel -// expected *sqlserverflex.PartialUpdateInstancePayload -// isValid bool -// }{ -// { -// "default_values", -// &Model{}, -// []string{}, -// &flavorModel{}, -// &sqlserverflex.PartialUpdateInstancePayload{ -// Acl: &sqlserverflex.CreateInstancePayloadAcl{ -// Items: &[]string{}, -// }, -// }, -// true, -// }, -// { -// "simple_values", -// &Model{ -// BackupSchedule: types.StringValue("schedule"), -// Name: types.StringValue("name"), -// Replicas: types.Int64Value(12), -// Version: types.StringValue("version"), -// }, -// []string{ -// "ip_1", -// "ip_2", -// }, -// &flavorModel{ -// Id: types.StringValue("flavor_id"), -// }, -// &sqlserverflex.PartialUpdateInstancePayload{ -// Acl: &sqlserverflex.CreateInstancePayloadAcl{ -// Items: &[]string{ -// "ip_1", -// "ip_2", -// }, -// }, -// BackupSchedule: utils.Ptr("schedule"), -// FlavorId: utils.Ptr("flavor_id"), -// Name: utils.Ptr("name"), -// Version: utils.Ptr("version"), -// }, -// true, -// }, -// { -// "null_fields_and_int_conversions", -// &Model{ -// BackupSchedule: types.StringNull(), -// Name: types.StringNull(), -// Replicas: types.Int64Value(2123456789), -// Version: types.StringNull(), -// }, -// []string{ -// "", -// }, -// &flavorModel{ -// Id: types.StringNull(), -// }, -// &sqlserverflex.PartialUpdateInstancePayload{ -// Acl: &sqlserverflex.CreateInstancePayloadAcl{ -// Items: &[]string{ -// "", -// }, -// }, -// BackupSchedule: nil, -// FlavorId: nil, -// Name: nil, -// Version: nil, -// }, -// true, -// }, -// { -// "nil_model", -// nil, -// []string{}, -// &flavorModel{}, -// nil, -// false, -// }, -// { -// "nil_acl", -// &Model{}, -// nil, -// &flavorModel{}, -// &sqlserverflex.PartialUpdateInstancePayload{ -// Acl: &sqlserverflex.CreateInstancePayloadAcl{}, -// }, -// true, -// }, -// { -// "nil_flavor", -// &Model{}, -// []string{}, -// nil, -// nil, -// false, -// }, -// } -// for _, tt := range tests { -// t.Run(tt.description, func(t *testing.T) { -// output, err := toUpdatePayload(tt.input, tt.inputAcl, tt.inputFlavor) -// if !tt.isValid && err == nil { -// t.Fatalf("Should have failed") -// } -// if tt.isValid && err != nil { -// t.Fatalf("Should not have failed: %v", err) -// } -// if tt.isValid { -// diff := cmp.Diff(output, tt.expected) -// if diff != "" { -// t.Fatalf("Data does not match: %s", diff) -// } -// } -// }) -// } -// } -// -// func TestLoadFlavorId(t *testing.T) { -// tests := []struct { -// description string -// inputFlavor *flavorModel -// mockedResp *sqlserverflex.ListFlavorsResponse -// expected *flavorModel -// getFlavorsFails bool -// isValid bool -// }{ -// { -// "ok_flavor", -// &flavorModel{ -// CPU: types.Int64Value(2), -// RAM: types.Int64Value(8), -// }, -// &sqlserverflex.ListFlavorsResponse{ -// Flavors: &[]sqlserverflex.InstanceFlavorEntry{ -// { -// Id: utils.Ptr("fid-1"), -// Cpu: utils.Ptr(int64(2)), -// Description: utils.Ptr("description"), -// Ram: utils.Ptr(int64(8)), -// }, -// }, -// }, -// &flavorModel{ -// Id: types.StringValue("fid-1"), -// Description: types.StringValue("description"), -// CPU: types.Int64Value(2), -// RAM: types.Int64Value(8), -// }, -// false, -// true, -// }, -// { -// "ok_flavor_2", -// &flavorModel{ -// CPU: types.Int64Value(2), -// RAM: types.Int64Value(8), -// }, -// &sqlserverflex.ListFlavorsResponse{ -// Flavors: &[]sqlserverflex.InstanceFlavorEntry{ -// { -// Id: utils.Ptr("fid-1"), -// Cpu: utils.Ptr(int64(2)), -// Description: utils.Ptr("description"), -// Ram: utils.Ptr(int64(8)), -// }, -// { -// Id: utils.Ptr("fid-2"), -// Cpu: utils.Ptr(int64(1)), -// Description: utils.Ptr("description"), -// Ram: utils.Ptr(int64(4)), -// }, -// }, -// }, -// &flavorModel{ -// Id: types.StringValue("fid-1"), -// Description: types.StringValue("description"), -// CPU: types.Int64Value(2), -// RAM: types.Int64Value(8), -// }, -// false, -// true, -// }, -// { -// "no_matching_flavor", -// &flavorModel{ -// CPU: types.Int64Value(2), -// RAM: types.Int64Value(8), -// }, -// &sqlserverflex.ListFlavorsResponse{ -// Flavors: &[]sqlserverflex.InstanceFlavorEntry{ -// { -// Id: utils.Ptr("fid-1"), -// Cpu: utils.Ptr(int64(1)), -// Description: utils.Ptr("description"), -// Ram: utils.Ptr(int64(8)), -// }, -// { -// Id: utils.Ptr("fid-2"), -// Cpu: utils.Ptr(int64(1)), -// Description: utils.Ptr("description"), -// Ram: utils.Ptr(int64(4)), -// }, -// }, -// }, -// &flavorModel{ -// CPU: types.Int64Value(2), -// RAM: types.Int64Value(8), -// }, -// false, -// false, -// }, -// { -// "nil_response", -// &flavorModel{ -// CPU: types.Int64Value(2), -// RAM: types.Int64Value(8), -// }, -// &sqlserverflex.ListFlavorsResponse{}, -// &flavorModel{ -// CPU: types.Int64Value(2), -// RAM: types.Int64Value(8), -// }, -// false, -// false, -// }, -// { -// "error_response", -// &flavorModel{ -// CPU: types.Int64Value(2), -// RAM: types.Int64Value(8), -// }, -// &sqlserverflex.ListFlavorsResponse{}, -// &flavorModel{ -// CPU: types.Int64Value(2), -// RAM: types.Int64Value(8), -// }, -// true, -// false, -// }, -// } -// for _, tt := range tests { -// t.Run(tt.description, func(t *testing.T) { -// client := &sqlserverflexClientMocked{ -// returnError: tt.getFlavorsFails, -// listFlavorsResp: tt.mockedResp, -// } -// model := &Model{ -// ProjectId: types.StringValue("pid"), -// } -// flavorModel := &flavorModel{ -// CPU: tt.inputFlavor.CPU, -// RAM: tt.inputFlavor.RAM, -// } -// err := loadFlavorId(context.Background(), client, model, flavorModel) -// if !tt.isValid && err == nil { -// t.Fatalf("Should have failed") -// } -// if tt.isValid && err != nil { -// t.Fatalf("Should not have failed: %v", err) -// } -// if tt.isValid { -// diff := cmp.Diff(flavorModel, tt.expected) -// if diff != "" { -// t.Fatalf("Data does not match: %s", diff) -// } -// } -// }) -// } -// } diff --git a/stackit/internal/services/sqlserverflexalpha/sqlserverflex_acc_test.go b/stackit/internal/services/sqlserverflexalpha/sqlserverflex_acc_test.go index 974e724c..d638bae2 100644 --- a/stackit/internal/services/sqlserverflexalpha/sqlserverflex_acc_test.go +++ b/stackit/internal/services/sqlserverflexalpha/sqlserverflex_acc_test.go @@ -4,22 +4,68 @@ import ( "context" _ "embed" "fmt" + "log" "os" "strconv" + "strings" "testing" "github.com/hashicorp/terraform-plugin-testing/helper/acctest" "github.com/hashicorp/terraform-plugin-testing/helper/resource" - - sqlserverflexalpha "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/instance" + "github.com/stackitcloud/stackit-sdk-go/core/config" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/internal/testutils" + sqlserverflexalphaPkgGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha" + sqlserverflexalpha "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/instance" + // The fwresource import alias is so there is no collision // with the more typical acceptance testing import: // "github.com/hashicorp/terraform-plugin-testing/helper/resource" fwresource "github.com/hashicorp/terraform-plugin-framework/resource" ) +const providerPrefix = "stackitprivatepreview_sqlserverflexalpha" + +var testInstances []string + +func init() { + sweeperName := fmt.Sprintf("%s_%s", providerPrefix, "sweeper") + + resource.AddTestSweepers(sweeperName, &resource.Sweeper{ + Name: sweeperName, + F: func(region string) error { + ctx := context.Background() + apiClientConfigOptions := []config.ConfigurationOption{} + apiClient, err := sqlserverflexalphaPkgGen.NewAPIClient(apiClientConfigOptions...) + if err != nil { + log.Fatalln(err) + } + + instances, err := apiClient.ListInstancesRequest(ctx, testutils.ProjectId, region). + Size(100). + Execute() + if err != nil { + log.Fatalln(err) + } + + for _, inst := range instances.GetInstances() { + if strings.HasPrefix(inst.GetName(), "tf-acc-") { + for _, item := range testInstances { + if inst.GetName() == item { + delErr := apiClient.DeleteInstanceRequestExecute(ctx, testutils.ProjectId, region, inst.GetId()) + if delErr != nil { + // TODO: maybe just warn? + log.Fatalln(delErr) + } + } + } + } + } + return nil + }, + }) +} + func TestInstanceResourceSchema(t *testing.T) { t.Parallel() @@ -85,9 +131,15 @@ type User struct { } type Database struct { - Name string - ProjectId string - Owner string + Name string + ProjectId string + Owner string + Collation string + Compatibility string +} + +func resName(res, name string) string { + return fmt.Sprintf("%s_%s.%s", providerPrefix, res, name) } func getExample() resData { @@ -112,10 +164,6 @@ func getExample() resData { func TestAccInstance(t *testing.T) { exData := getExample() - resName := fmt.Sprintf( - "stackitprivatepreview_sqlserverflexalpha_instance.%s", - exData.TfName, - ) updNameData := exData updNameData.Name = "name-updated" @@ -123,8 +171,12 @@ func TestAccInstance(t *testing.T) { updSizeData := exData updSizeData.Size = 25 - resource.Test(t, resource.TestCase{ - PreCheck: func() { testAccPreCheck(t) }, + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { + testAccPreCheck(t) + t.Logf(" ... working on instance %s", exData.TfName) + testInstances = append(testInstances, exData.TfName) + }, ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories, Steps: []resource.TestStep{ // Create and verify @@ -134,8 +186,9 @@ func TestAccInstance(t *testing.T) { exData, ), Check: resource.ComposeAggregateTestCheckFunc( - resource.TestCheckResourceAttr(resName, "name", exData.Name), - resource.TestCheckResourceAttrSet(resName, "id"), + resource.TestCheckResourceAttr(resName("instance", exData.TfName), "name", exData.Name), + resource.TestCheckResourceAttrSet(resName("instance", exData.TfName), "id"), + // TODO: check all fields ), }, // Update name and verify @@ -145,7 +198,7 @@ func TestAccInstance(t *testing.T) { updNameData, ), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(resName, "name", updNameData.Name), + resource.TestCheckResourceAttr(resName("instance", exData.TfName), "name", updNameData.Name), ), }, // Update size and verify @@ -156,15 +209,18 @@ func TestAccInstance(t *testing.T) { ), Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttr( - resName, + testutils.ResStr(providerPrefix, "instance", exData.TfName), "storage.size", strconv.Itoa(int(updSizeData.Size)), ), ), }, + { + RefreshState: true, + }, //// Import test //{ - // ResourceName: "example_resource.test", + // ResourceName: resName("instance", exData.TfName), // ImportState: true, // ImportStateVerify: true, // }, @@ -172,29 +228,39 @@ func TestAccInstance(t *testing.T) { }) } -func TestAccInstanceWithUsers(t *testing.T) { +func TestAccInstanceNoEncryption(t *testing.T) { data := getExample() + + dbName := "testDb" userName := "testUser" data.Users = []User{ { Name: userName, ProjectId: os.Getenv("TF_ACC_PROJECT_ID"), - Roles: []string{"##STACKIT_LoginManager##", "##STACKIT_DatabaseManager##"}, + Roles: []string{ + "##STACKIT_DatabaseManager##", + "##STACKIT_LoginManager##", + "##STACKIT_ProcessManager##", + "##STACKIT_ServerManager##", + "##STACKIT_SQLAgentManager##", + "##STACKIT_SQLAgentUser##", + }, + }, + } + data.Databases = []Database{ + { + Name: dbName, + ProjectId: os.Getenv("TF_ACC_PROJECT_ID"), + Owner: userName, }, } - resName := fmt.Sprintf( - "stackitprivatepreview_sqlserverflexalpha_instance.%s", - data.TfName, - ) - - resUserName := fmt.Sprintf( - "stackitprivatepreview_sqlserverflexalpha_user.%s", - userName, - ) - - resource.Test(t, resource.TestCase{ - PreCheck: func() { testAccPreCheck(t) }, + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { + testAccPreCheck(t) + t.Logf(" ... working on instance %s", data.TfName) + testInstances = append(testInstances, data.TfName) + }, ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories, Steps: []resource.TestStep{ // Create and verify @@ -204,18 +270,71 @@ func TestAccInstanceWithUsers(t *testing.T) { data, ), Check: resource.ComposeAggregateTestCheckFunc( - resource.TestCheckResourceAttr(resName, "name", data.Name), - resource.TestCheckResourceAttrSet(resName, "id"), - resource.TestCheckResourceAttr(resUserName, "name", userName), - resource.TestCheckResourceAttrSet(resUserName, "id"), + // check instance values are set + resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "id"), + resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "backup_schedule"), + resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "edition"), + resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "flavor_id"), + resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "instance_id"), + resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "is_deletable"), + resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "name"), + resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "replicas"), + resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "retention_days"), + resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "status"), + resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "version"), + + resource.TestCheckNoResourceAttr(resName("instance", data.TfName), "encryption"), + + // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption"), + // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_id"), + // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_version"), + //resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_ring_id"), + //resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.service_account"), + + // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.access_scope"), + // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.acl"), + + // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.instance_address"), + // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.router_address"), + + // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "storage.class"), + // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "storage.size"), + + // check instance values are correct + resource.TestCheckResourceAttr(resName("instance", data.TfName), "name", data.Name), + + // check user values are set + resource.TestCheckResourceAttrSet(resName("user", userName), "id"), + resource.TestCheckResourceAttrSet(resName("user", userName), "username"), + // resource.TestCheckResourceAttrSet(resName("user", userName), "roles"), + + // func(s *terraform.State) error { + // return nil + // }, + + // check user values are correct + resource.TestCheckResourceAttr(resName("user", userName), "username", userName), + resource.TestCheckResourceAttr(resName("user", userName), "roles.#", strconv.Itoa(len(data.Users[0].Roles))), + + // check database values are set + resource.TestCheckResourceAttrSet(resName("database", dbName), "id"), + resource.TestCheckResourceAttrSet(resName("database", dbName), "name"), + resource.TestCheckResourceAttrSet(resName("database", dbName), "owner"), + resource.TestCheckResourceAttrSet(resName("database", dbName), "compatibility"), + resource.TestCheckResourceAttrSet(resName("database", dbName), "collation"), + + // check database values are correct + resource.TestCheckResourceAttr(resName("database", dbName), "name", dbName), + resource.TestCheckResourceAttr(resName("database", dbName), "owner", userName), ), }, }, }) } -func TestAccInstanceWithDatabases(t *testing.T) { +func TestAccInstanceEncryption(t *testing.T) { data := getExample() + dbName := "testDb" userName := "testUser" data.Users = []User{ @@ -233,23 +352,18 @@ func TestAccInstanceWithDatabases(t *testing.T) { }, } - resName := fmt.Sprintf( - "stackitprivatepreview_sqlserverflexalpha_instance.%s", - data.TfName, - ) + data.UseEncryption = true + data.KekKeyId = "fe039bcf-8d7b-431a-801d-9e81371a6b7b" + data.KekKeyRingId = "6a2d95ab-3c4c-4963-a2bb-08d17a320e27" + data.KekKeyVersion = 1 + data.KekServiceAccount = "henselinm-u2v3ex1@sa.stackit.cloud" - resUserName := fmt.Sprintf( - "stackitprivatepreview_sqlserverflexalpha_user.%s", - userName, - ) - - resDbName := fmt.Sprintf( - "stackitprivatepreview_sqlserverflexalpha_database.%s", - dbName, - ) - - resource.Test(t, resource.TestCase{ - PreCheck: func() { testAccPreCheck(t) }, + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { + testAccPreCheck(t) + t.Logf(" ... working on instance %s", data.TfName) + testInstances = append(testInstances, data.TfName) + }, ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories, Steps: []resource.TestStep{ // Create and verify @@ -259,13 +373,59 @@ func TestAccInstanceWithDatabases(t *testing.T) { data, ), Check: resource.ComposeAggregateTestCheckFunc( - resource.TestCheckResourceAttr(resName, "name", data.Name), - resource.TestCheckResourceAttrSet(resName, "id"), - resource.TestCheckResourceAttr(resUserName, "name", userName), - resource.TestCheckResourceAttrSet(resUserName, "id"), - resource.TestCheckResourceAttr(resDbName, "name", dbName), - resource.TestCheckResourceAttr(resDbName, "owner", userName), - resource.TestCheckResourceAttrSet(resDbName, "id"), + // check instance values are set + resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "id"), + resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "backup_schedule"), + resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "edition"), + resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "flavor_id"), + resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "instance_id"), + resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "is_deletable"), + resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "name"), + resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "replicas"), + resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "retention_days"), + resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "status"), + resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "version"), + + // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption"), + // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_id"), + // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_version"), + //resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_ring_id"), + //resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.service_account"), + + // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.access_scope"), + // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.acl"), + + // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.instance_address"), + // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.router_address"), + + // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "storage.class"), + // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "storage.size"), + + // check instance values are correct + resource.TestCheckResourceAttr(resName("instance", data.TfName), "name", data.Name), + + // check user values are set + resource.TestCheckResourceAttrSet(resName("user", userName), "id"), + resource.TestCheckResourceAttrSet(resName("user", userName), "username"), + + // func(s *terraform.State) error { + // return nil + // }, + + // check user values are correct + resource.TestCheckResourceAttr(resName("user", userName), "username", userName), + resource.TestCheckResourceAttr(resName("user", userName), "roles.#", "2"), + + // check database values are set + resource.TestCheckResourceAttrSet(resName("database", dbName), "id"), + resource.TestCheckResourceAttrSet(resName("database", dbName), "name"), + resource.TestCheckResourceAttrSet(resName("database", dbName), "owner"), + resource.TestCheckResourceAttrSet(resName("database", dbName), "compatibility"), + resource.TestCheckResourceAttrSet(resName("database", dbName), "collation"), + + // check database values are correct + resource.TestCheckResourceAttr(resName("database", dbName), "name", dbName), + resource.TestCheckResourceAttr(resName("database", dbName), "owner", userName), ), }, }, diff --git a/stackit/internal/services/sqlserverflexalpha/sqlserverflex_acc_test.go_old.bak b/stackit/internal/services/sqlserverflexalpha/sqlserverflex_acc_test.go_old.bak deleted file mode 100644 index 5138deee..00000000 --- a/stackit/internal/services/sqlserverflexalpha/sqlserverflex_acc_test.go_old.bak +++ /dev/null @@ -1,483 +0,0 @@ -// Copyright (c) STACKIT - -package sqlserverflexalpha_test - -import ( - "context" - _ "embed" - "fmt" - "maps" - "strings" - "testing" - - "github.com/hashicorp/terraform-plugin-testing/config" - "github.com/hashicorp/terraform-plugin-testing/helper/acctest" - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-plugin-testing/terraform" - coreconfig "github.com/stackitcloud/stackit-sdk-go/core/config" - "github.com/stackitcloud/stackit-sdk-go/core/utils" - "github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex" - "github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex/wait" - "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/internal/testutils" - "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" -) - -var ( - //go:embed testdata/resource-max.tf - resourceMaxConfig string - //go:embed testdata/resource-min.tf - resourceMinConfig string -) - -var testConfigVarsMin = config.Variables{ - "project_id": config.StringVariable(testutils.ProjectId), - "name": config.StringVariable(fmt.Sprintf("tf-acc-%s", acctest.RandStringFromCharSet(7, acctest.CharSetAlphaNum))), - "flavor_cpu": config.IntegerVariable(4), - "flavor_ram": config.IntegerVariable(16), - "flavor_description": config.StringVariable("SQLServer-Flex-4.16-Standard-EU01"), - "replicas": config.IntegerVariable(1), - "flavor_id": config.StringVariable("4.16-Single"), - "username": config.StringVariable(fmt.Sprintf("tf-acc-user-%s", acctest.RandStringFromCharSet(7, acctest.CharSetAlpha))), - "role": config.StringVariable("##STACKIT_LoginManager##"), -} - -var testConfigVarsMax = config.Variables{ - "project_id": config.StringVariable(testutils.ProjectId), - "name": config.StringVariable(fmt.Sprintf("tf-acc-%s", acctest.RandStringFromCharSet(7, acctest.CharSetAlphaNum))), - "acl1": config.StringVariable("192.168.0.0/16"), - "flavor_cpu": config.IntegerVariable(4), - "flavor_ram": config.IntegerVariable(16), - "flavor_description": config.StringVariable("SQLServer-Flex-4.16-Standard-EU01"), - "storage_class": config.StringVariable("premium-perf2-stackit"), - "storage_size": config.IntegerVariable(40), - "server_version": config.StringVariable("2022"), - "replicas": config.IntegerVariable(1), - "options_retention_days": config.IntegerVariable(64), - "flavor_id": config.StringVariable("4.16-Single"), - "backup_schedule": config.StringVariable("00 6 * * *"), - "username": config.StringVariable(fmt.Sprintf("tf-acc-user-%s", acctest.RandStringFromCharSet(7, acctest.CharSetAlpha))), - "role": config.StringVariable("##STACKIT_LoginManager##"), - "region": config.StringVariable(testutils.Region), -} - -func configVarsMinUpdated() config.Variables { - temp := maps.Clone(testConfigVarsMax) - temp["name"] = config.StringVariable(testutils.ConvertConfigVariable(temp["name"]) + "changed") - return temp -} - -func configVarsMaxUpdated() config.Variables { - temp := maps.Clone(testConfigVarsMax) - temp["backup_schedule"] = config.StringVariable("00 12 * * *") - return temp -} - -func TestAccSQLServerFlexMinResource(t *testing.T) { - resource.Test(t, resource.TestCase{ - ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories, - CheckDestroy: testAccChecksqlserverflexDestroy, - Steps: []resource.TestStep{ - // Creation - { - Config: testutils.SQLServerFlexProviderConfig("") + "\n" + resourceMinConfig, - ConfigVariables: testConfigVarsMin, - Check: resource.ComposeAggregateTestCheckFunc( - // Instance - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "project_id", testutils.ConvertConfigVariable(testConfigVarsMin["project_id"])), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "instance_id"), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "name", testutils.ConvertConfigVariable(testConfigVarsMin["name"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "acl.#", "1"), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "flavor.id"), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.description", testutils.ConvertConfigVariable(testConfigVarsMin["flavor_description"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "replicas", testutils.ConvertConfigVariable(testConfigVarsMin["replicas"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.cpu", testutils.ConvertConfigVariable(testConfigVarsMin["flavor_cpu"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.ram", testutils.ConvertConfigVariable(testConfigVarsMin["flavor_ram"])), - // User - resource.TestCheckResourceAttrPair( - "stackit_sqlserverflex_user.user", "project_id", - "stackit_sqlserverflex_instance.instance", "project_id", - ), - resource.TestCheckResourceAttrPair( - "stackit_sqlserverflex_user.user", "instance_id", - "stackit_sqlserverflex_instance.instance", "instance_id", - ), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_user.user", "user_id"), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_user.user", "password"), - ), - }, - // Update - { - Config: testutils.SQLServerFlexProviderConfig("") + "\n" + resourceMinConfig, - ConfigVariables: testConfigVarsMin, - Check: resource.ComposeAggregateTestCheckFunc( - // Instance - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "project_id", testutils.ConvertConfigVariable(testConfigVarsMin["project_id"])), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "instance_id"), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "name", testutils.ConvertConfigVariable(testConfigVarsMin["name"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "acl.#", "1"), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "flavor.id"), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.description", testutils.ConvertConfigVariable(testConfigVarsMin["flavor_description"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.cpu", testutils.ConvertConfigVariable(testConfigVarsMin["flavor_cpu"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.ram", testutils.ConvertConfigVariable(testConfigVarsMin["flavor_ram"])), - // User - resource.TestCheckResourceAttrPair( - "stackit_sqlserverflex_user.user", "project_id", - "stackit_sqlserverflex_instance.instance", "project_id", - ), - resource.TestCheckResourceAttrPair( - "stackit_sqlserverflex_user.user", "instance_id", - "stackit_sqlserverflex_instance.instance", "instance_id", - ), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_user.user", "user_id"), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_user.user", "password"), - ), - }, - // data source - { - Config: testutils.SQLServerFlexProviderConfig("") + "\n" + resourceMinConfig, - ConfigVariables: testConfigVarsMin, - Check: resource.ComposeAggregateTestCheckFunc( - // Instance data - resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "project_id", testutils.ConvertConfigVariable(testConfigVarsMin["project_id"])), - resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "name", testutils.ConvertConfigVariable(testConfigVarsMin["name"])), - resource.TestCheckResourceAttrPair( - "data.stackit_sqlserverflex_instance.instance", "project_id", - "stackit_sqlserverflex_instance.instance", "project_id", - ), - resource.TestCheckResourceAttrPair( - "data.stackit_sqlserverflex_instance.instance", "instance_id", - "stackit_sqlserverflex_instance.instance", "instance_id", - ), - resource.TestCheckResourceAttrPair( - "data.stackit_sqlserverflex_user.user", "instance_id", - "stackit_sqlserverflex_user.user", "instance_id", - ), - - resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "acl.#", "1"), - resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "flavor.id", testutils.ConvertConfigVariable(testConfigVarsMin["flavor_id"])), - resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "flavor.description", testutils.ConvertConfigVariable(testConfigVarsMin["flavor_description"])), - resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "flavor.cpu", testutils.ConvertConfigVariable(testConfigVarsMin["flavor_cpu"])), - resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "flavor.ram", testutils.ConvertConfigVariable(testConfigVarsMin["flavor_ram"])), - - // User data - resource.TestCheckResourceAttr("data.stackit_sqlserverflex_user.user", "project_id", testutils.ConvertConfigVariable(testConfigVarsMin["project_id"])), - resource.TestCheckResourceAttrSet("data.stackit_sqlserverflex_user.user", "user_id"), - resource.TestCheckResourceAttr("data.stackit_sqlserverflex_user.user", "username", testutils.ConvertConfigVariable(testConfigVarsMin["username"])), - resource.TestCheckResourceAttr("data.stackit_sqlserverflex_user.user", "roles.#", "1"), - resource.TestCheckResourceAttr("data.stackit_sqlserverflex_user.user", "roles.0", testutils.ConvertConfigVariable(testConfigVarsMax["role"])), - ), - }, - // Import - { - ConfigVariables: testConfigVarsMin, - ResourceName: "stackit_sqlserverflex_instance.instance", - ImportStateIdFunc: func(s *terraform.State) (string, error) { - r, ok := s.RootModule().Resources["stackit_sqlserverflex_instance.instance"] - if !ok { - return "", fmt.Errorf("couldn't find resource stackit_sqlserverflex_instance.instance") - } - instanceId, ok := r.Primary.Attributes["instance_id"] - if !ok { - return "", fmt.Errorf("couldn't find attribute instance_id") - } - - return fmt.Sprintf("%s,%s,%s", testutils.ProjectId, testutils.Region, instanceId), nil - }, - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"backup_schedule"}, - ImportStateCheck: func(s []*terraform.InstanceState) error { - if len(s) != 1 { - return fmt.Errorf("expected 1 state, got %d", len(s)) - } - return nil - }, - }, - { - ResourceName: "stackit_sqlserverflex_user.user", - ConfigVariables: testConfigVarsMin, - ImportStateIdFunc: func(s *terraform.State) (string, error) { - r, ok := s.RootModule().Resources["stackit_sqlserverflex_user.user"] - if !ok { - return "", fmt.Errorf("couldn't find resource stackit_sqlserverflex_user.user") - } - instanceId, ok := r.Primary.Attributes["instance_id"] - if !ok { - return "", fmt.Errorf("couldn't find attribute instance_id") - } - userId, ok := r.Primary.Attributes["user_id"] - if !ok { - return "", fmt.Errorf("couldn't find attribute user_id") - } - - return fmt.Sprintf("%s,%s,%s,%s", testutils.ProjectId, testutils.Region, instanceId, userId), nil - }, - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"password"}, - }, - // Update - { - Config: testutils.SQLServerFlexProviderConfig("") + "\n" + resourceMinConfig, - ConfigVariables: configVarsMinUpdated(), - Check: resource.ComposeAggregateTestCheckFunc( - // Instance data - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "project_id", testutils.ConvertConfigVariable(configVarsMinUpdated()["project_id"])), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "instance_id"), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "name", testutils.ConvertConfigVariable(configVarsMinUpdated()["name"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "acl.#", "1"), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "flavor.id"), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "flavor.description"), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.cpu", testutils.ConvertConfigVariable(configVarsMinUpdated()["flavor_cpu"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.ram", testutils.ConvertConfigVariable(configVarsMinUpdated()["flavor_ram"])), - ), - }, - // Deletion is done by the framework implicitly - }, - }) -} - -func TestAccSQLServerFlexMaxResource(t *testing.T) { - resource.Test(t, resource.TestCase{ - ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories, - CheckDestroy: testAccChecksqlserverflexDestroy, - Steps: []resource.TestStep{ - // Creation - { - Config: testutils.SQLServerFlexProviderConfig("") + "\n" + resourceMaxConfig, - ConfigVariables: testConfigVarsMax, - Check: resource.ComposeAggregateTestCheckFunc( - // Instance - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "project_id", testutils.ConvertConfigVariable(testConfigVarsMax["project_id"])), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "instance_id"), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "name", testutils.ConvertConfigVariable(testConfigVarsMax["name"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "acl.#", "1"), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "acl.0", testutils.ConvertConfigVariable(testConfigVarsMax["acl1"])), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "flavor.id"), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.description", testutils.ConvertConfigVariable(testConfigVarsMax["flavor_description"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "replicas", testutils.ConvertConfigVariable(testConfigVarsMax["replicas"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.cpu", testutils.ConvertConfigVariable(testConfigVarsMax["flavor_cpu"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.ram", testutils.ConvertConfigVariable(testConfigVarsMax["flavor_ram"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "storage.class", testutils.ConvertConfigVariable(testConfigVarsMax["storage_class"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "storage.size", testutils.ConvertConfigVariable(testConfigVarsMax["storage_size"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "version", testutils.ConvertConfigVariable(testConfigVarsMax["server_version"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "options.retention_days", testutils.ConvertConfigVariable(testConfigVarsMax["options_retention_days"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "backup_schedule", testutils.ConvertConfigVariable(testConfigVarsMax["backup_schedule"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "region", testutils.Region), - // User - resource.TestCheckResourceAttrPair( - "stackit_sqlserverflex_user.user", "project_id", - "stackit_sqlserverflex_instance.instance", "project_id", - ), - resource.TestCheckResourceAttrPair( - "stackit_sqlserverflex_user.user", "instance_id", - "stackit_sqlserverflex_instance.instance", "instance_id", - ), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_user.user", "user_id"), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_user.user", "password"), - ), - }, - // Update - { - Config: testutils.SQLServerFlexProviderConfig("") + "\n" + resourceMaxConfig, - ConfigVariables: testConfigVarsMax, - Check: resource.ComposeAggregateTestCheckFunc( - // Instance - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "project_id", testutils.ConvertConfigVariable(testConfigVarsMax["project_id"])), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "instance_id"), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "name", testutils.ConvertConfigVariable(testConfigVarsMax["name"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "acl.#", "1"), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "acl.0", testutils.ConvertConfigVariable(testConfigVarsMax["acl1"])), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "flavor.id"), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.description", testutils.ConvertConfigVariable(testConfigVarsMax["flavor_description"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "replicas", testutils.ConvertConfigVariable(testConfigVarsMax["replicas"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.cpu", testutils.ConvertConfigVariable(testConfigVarsMax["flavor_cpu"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.ram", testutils.ConvertConfigVariable(testConfigVarsMax["flavor_ram"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "storage.class", testutils.ConvertConfigVariable(testConfigVarsMax["storage_class"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "storage.size", testutils.ConvertConfigVariable(testConfigVarsMax["storage_size"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "version", testutils.ConvertConfigVariable(testConfigVarsMax["server_version"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "options.retention_days", testutils.ConvertConfigVariable(testConfigVarsMax["options_retention_days"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "backup_schedule", testutils.ConvertConfigVariable(testConfigVarsMax["backup_schedule"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "region", testutils.Region), - // User - resource.TestCheckResourceAttrPair( - "stackit_sqlserverflex_user.user", "project_id", - "stackit_sqlserverflex_instance.instance", "project_id", - ), - resource.TestCheckResourceAttrPair( - "stackit_sqlserverflex_user.user", "instance_id", - "stackit_sqlserverflex_instance.instance", "instance_id", - ), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_user.user", "user_id"), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_user.user", "password"), - ), - }, - // data source - { - Config: testutils.SQLServerFlexProviderConfig("") + "\n" + resourceMaxConfig, - ConfigVariables: testConfigVarsMax, - Check: resource.ComposeAggregateTestCheckFunc( - // Instance data - resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "project_id", testutils.ConvertConfigVariable(testConfigVarsMax["project_id"])), - resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "name", testutils.ConvertConfigVariable(testConfigVarsMax["name"])), - resource.TestCheckResourceAttrPair( - "data.stackit_sqlserverflex_instance.instance", "project_id", - "stackit_sqlserverflex_instance.instance", "project_id", - ), - resource.TestCheckResourceAttrPair( - "data.stackit_sqlserverflex_instance.instance", "instance_id", - "stackit_sqlserverflex_instance.instance", "instance_id", - ), - resource.TestCheckResourceAttrPair( - "data.stackit_sqlserverflex_user.user", "instance_id", - "stackit_sqlserverflex_user.user", "instance_id", - ), - - resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "acl.#", "1"), - resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "acl.0", testutils.ConvertConfigVariable(testConfigVarsMax["acl1"])), - resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "flavor.id", testutils.ConvertConfigVariable(testConfigVarsMax["flavor_id"])), - resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "flavor.description", testutils.ConvertConfigVariable(testConfigVarsMax["flavor_description"])), - resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "flavor.cpu", testutils.ConvertConfigVariable(testConfigVarsMax["flavor_cpu"])), - resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "flavor.ram", testutils.ConvertConfigVariable(testConfigVarsMax["flavor_ram"])), - resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "replicas", testutils.ConvertConfigVariable(testConfigVarsMax["replicas"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "options.retention_days", testutils.ConvertConfigVariable(testConfigVarsMax["options_retention_days"])), - resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "backup_schedule", testutils.ConvertConfigVariable(testConfigVarsMax["backup_schedule"])), - - // User data - resource.TestCheckResourceAttr("data.stackit_sqlserverflex_user.user", "project_id", testutils.ConvertConfigVariable(testConfigVarsMax["project_id"])), - resource.TestCheckResourceAttrSet("data.stackit_sqlserverflex_user.user", "user_id"), - resource.TestCheckResourceAttr("data.stackit_sqlserverflex_user.user", "username", testutils.ConvertConfigVariable(testConfigVarsMax["username"])), - resource.TestCheckResourceAttr("data.stackit_sqlserverflex_user.user", "roles.#", "1"), - resource.TestCheckResourceAttr("data.stackit_sqlserverflex_user.user", "roles.0", testutils.ConvertConfigVariable(testConfigVarsMax["role"])), - resource.TestCheckResourceAttrSet("data.stackit_sqlserverflex_user.user", "host"), - resource.TestCheckResourceAttrSet("data.stackit_sqlserverflex_user.user", "port"), - ), - }, - // Import - { - ConfigVariables: testConfigVarsMax, - ResourceName: "stackit_sqlserverflex_instance.instance", - ImportStateIdFunc: func(s *terraform.State) (string, error) { - r, ok := s.RootModule().Resources["stackit_sqlserverflex_instance.instance"] - if !ok { - return "", fmt.Errorf("couldn't find resource stackit_sqlserverflex_instance.instance") - } - instanceId, ok := r.Primary.Attributes["instance_id"] - if !ok { - return "", fmt.Errorf("couldn't find attribute instance_id") - } - - return fmt.Sprintf("%s,%s,%s", testutils.ProjectId, testutils.Region, instanceId), nil - }, - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"backup_schedule"}, - ImportStateCheck: func(s []*terraform.InstanceState) error { - if len(s) != 1 { - return fmt.Errorf("expected 1 state, got %d", len(s)) - } - if s[0].Attributes["backup_schedule"] != testutils.ConvertConfigVariable(testConfigVarsMax["backup_schedule"]) { - return fmt.Errorf("expected backup_schedule %s, got %s", testConfigVarsMax["backup_schedule"], s[0].Attributes["backup_schedule"]) - } - return nil - }, - }, - { - ResourceName: "stackit_sqlserverflex_user.user", - ConfigVariables: testConfigVarsMax, - ImportStateIdFunc: func(s *terraform.State) (string, error) { - r, ok := s.RootModule().Resources["stackit_sqlserverflex_user.user"] - if !ok { - return "", fmt.Errorf("couldn't find resource stackit_sqlserverflex_user.user") - } - instanceId, ok := r.Primary.Attributes["instance_id"] - if !ok { - return "", fmt.Errorf("couldn't find attribute instance_id") - } - userId, ok := r.Primary.Attributes["user_id"] - if !ok { - return "", fmt.Errorf("couldn't find attribute user_id") - } - - return fmt.Sprintf("%s,%s,%s,%s", testutils.ProjectId, testutils.Region, instanceId, userId), nil - }, - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"password"}, - }, - // Update - { - Config: testutils.SQLServerFlexProviderConfig("") + "\n" + resourceMaxConfig, - ConfigVariables: configVarsMaxUpdated(), - Check: resource.ComposeAggregateTestCheckFunc( - // Instance data - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "project_id", testutils.ConvertConfigVariable(configVarsMaxUpdated()["project_id"])), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "instance_id"), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "name", testutils.ConvertConfigVariable(configVarsMaxUpdated()["name"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "acl.#", "1"), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "acl.0", testutils.ConvertConfigVariable(configVarsMaxUpdated()["acl1"])), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "flavor.id"), - resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "flavor.description"), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.cpu", testutils.ConvertConfigVariable(configVarsMaxUpdated()["flavor_cpu"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.ram", testutils.ConvertConfigVariable(configVarsMaxUpdated()["flavor_ram"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "replicas", testutils.ConvertConfigVariable(configVarsMaxUpdated()["replicas"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "storage.class", testutils.ConvertConfigVariable(configVarsMaxUpdated()["storage_class"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "storage.size", testutils.ConvertConfigVariable(configVarsMaxUpdated()["storage_size"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "version", testutils.ConvertConfigVariable(configVarsMaxUpdated()["server_version"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "options.retention_days", testutils.ConvertConfigVariable(configVarsMaxUpdated()["options_retention_days"])), - resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "backup_schedule", testutils.ConvertConfigVariable(configVarsMaxUpdated()["backup_schedule"])), - ), - }, - // Deletion is done by the framework implicitly - }, - }) -} - -func testAccChecksqlserverflexDestroy(s *terraform.State) error { - ctx := context.Background() - var client *sqlserverflex.APIClient - var err error - if testutils.SQLServerFlexCustomEndpoint == "" { - client, err = sqlserverflex.NewAPIClient() - } else { - client, err = sqlserverflex.NewAPIClient( - coreconfig.WithEndpoint(testutils.SQLServerFlexCustomEndpoint), - ) - } - if err != nil { - return fmt.Errorf("creating client: %w", err) - } - - instancesToDestroy := []string{} - for _, rs := range s.RootModule().Resources { - if rs.Type != "stackit_sqlserverflex_instance" { - continue - } - // instance terraform ID: = "[project_id],[region],[instance_id]" - instanceId := strings.Split(rs.Primary.ID, core.Separator)[2] - instancesToDestroy = append(instancesToDestroy, instanceId) - } - - instancesResp, err := client.ListInstances(ctx, testutils.ProjectId, testutils.Region).Execute() - if err != nil { - return fmt.Errorf("getting instancesResp: %w", err) - } - - items := *instancesResp.Items - for i := range items { - if items[i].Id == nil { - continue - } - if utils.Contains(instancesToDestroy, *items[i].Id) { - err := client.DeleteInstanceExecute(ctx, testutils.ProjectId, *items[i].Id, testutils.Region) - if err != nil { - return fmt.Errorf("destroying instance %s during CheckDestroy: %w", *items[i].Id, err) - } - _, err = wait.DeleteInstanceWaitHandler(ctx, client, testutils.ProjectId, *items[i].Id, testutils.Region).WaitWithContext(ctx) - if err != nil { - return fmt.Errorf("destroying instance %s during CheckDestroy: waiting for deletion %w", *items[i].Id, err) - } - } - } - return nil -} diff --git a/stackit/internal/services/sqlserverflexalpha/testdata/instance_template.gompl b/stackit/internal/services/sqlserverflexalpha/testdata/instance_template.gompl index 035a4344..0bf11c9c 100644 --- a/stackit/internal/services/sqlserverflexalpha/testdata/instance_template.gompl +++ b/stackit/internal/services/sqlserverflexalpha/testdata/instance_template.gompl @@ -15,8 +15,8 @@ resource "stackitprivatepreview_sqlserverflexalpha_instance" "{{ .TfName }}" { } {{ if .UseEncryption }} encryption = { - kek_key_id = {{ .KekKeyId }} - kek_key_ring_id = {{ .KekKeyRingId }} + kek_key_id = "{{ .KekKeyId }}" + kek_key_ring_id = "{{ .KekKeyRingId }}" kek_key_version = {{ .KekKeyVersion }} service_account = "{{ .KekServiceAccount }}" } @@ -44,10 +44,17 @@ resource "stackitprivatepreview_sqlserverflexalpha_user" "{{ $user.Name }}" { {{ $tfName := .TfName }} {{ range $db := .Databases }} resource "stackitprivatepreview_sqlserverflexalpha_database" "{{ $db.Name }}" { - project_id = "{{ $db.ProjectId }}" - instance_id = stackitprivatepreview_sqlserverflexalpha_instance.{{ $tfName }}.instance_id - name = "{{ $db.Name }}" - owner = "{{ $db.Owner }}" + depends_on = [stackitprivatepreview_sqlserverflexalpha_user.{{ $db.Owner }}] + project_id = "{{ $db.ProjectId }}" + instance_id = stackitprivatepreview_sqlserverflexalpha_instance.{{ $tfName }}.instance_id + name = "{{ $db.Name }}" + owner = "{{ $db.Owner }}" +{{ if $db.Collation }} + collation = "{{ $db.Collation }}" +{{ end }} +{{ if $db.Compatibility }} + compatibility = "{{ $db.Compatibility }}" +{{ end }} } {{ end }} {{ end }} diff --git a/stackit/internal/services/sqlserverflexalpha/user/datasource.go b/stackit/internal/services/sqlserverflexalpha/user/datasource.go index 95c517ce..64efb4a3 100644 --- a/stackit/internal/services/sqlserverflexalpha/user/datasource.go +++ b/stackit/internal/services/sqlserverflexalpha/user/datasource.go @@ -5,54 +5,47 @@ import ( "fmt" "net/http" - "github.com/hashicorp/terraform-plugin-framework-validators/int64validator" "github.com/hashicorp/terraform-plugin-framework/datasource" - "github.com/hashicorp/terraform-plugin-framework/datasource/schema" - "github.com/hashicorp/terraform-plugin-framework/schema/validator" "github.com/hashicorp/terraform-plugin-framework/types" "github.com/hashicorp/terraform-plugin-log/tflog" - "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" sqlserverflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/utils" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" - "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/validate" + + sqlserverflexalphaPkg "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha" + sqlserverflexalphaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/user/datasources_gen" ) -// Ensure the implementation satisfies the expected interfaces. -var ( - _ datasource.DataSource = &userDataSource{} -) +var _ datasource.DataSource = (*userDataSource)(nil) + +const errorPrefix = "[sqlserverflexalpha - User]" -// NewUserDataSource is a helper function to simplify the provider implementation. func NewUserDataSource() datasource.DataSource { return &userDataSource{} } type dataSourceModel struct { - //TODO: check generated data source for the correct types and pointers - Id types.String `tfsdk:"id"` // needed by TF - UserId types.Int64 `tfsdk:"user_id"` - InstanceId types.String `tfsdk:"instance_id"` - ProjectId types.String `tfsdk:"project_id"` - Username types.String `tfsdk:"username"` - Roles types.Set `tfsdk:"roles"` - Host types.String `tfsdk:"host"` - Port types.Int64 `tfsdk:"port"` - Region types.String `tfsdk:"region"` - Status types.String `tfsdk:"status"` DefaultDatabase types.String `tfsdk:"default_database"` + Host types.String `tfsdk:"host"` + Id types.String `tfsdk:"id"` + InstanceId types.String `tfsdk:"instance_id"` + Port types.Int64 `tfsdk:"port"` + ProjectId types.String `tfsdk:"project_id"` + Region types.String `tfsdk:"region"` + Roles types.List `tfsdk:"roles"` + Status types.String `tfsdk:"status"` + UserId types.Int64 `tfsdk:"user_id"` + Username types.String `tfsdk:"username"` } -// userDataSource is the data source implementation. type userDataSource struct { - client *sqlserverflexalpha.APIClient + client *sqlserverflexalphaPkg.APIClient providerData core.ProviderData } -// Metadata returns the data source type name. -func (r *userDataSource) Metadata( +func (d *userDataSource) Metadata( _ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse, @@ -60,108 +53,31 @@ func (r *userDataSource) Metadata( resp.TypeName = req.ProviderTypeName + "_sqlserverflexalpha_user" } +func (d *userDataSource) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) { + resp.Schema = sqlserverflexalphaGen.UserDataSourceSchema(ctx) +} + // Configure adds the provider configured client to the data source. -func (r *userDataSource) Configure( +func (d *userDataSource) Configure( ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse, ) { var ok bool - r.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics) + d.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics) if !ok { return } - apiClient := sqlserverflexUtils.ConfigureClient(ctx, &r.providerData, &resp.Diagnostics) + apiClient := sqlserverflexUtils.ConfigureClient(ctx, &d.providerData, &resp.Diagnostics) if resp.Diagnostics.HasError() { return } - r.client = apiClient - tflog.Info(ctx, "SQLServer Flex beta user client configured") + d.client = apiClient + tflog.Info(ctx, "SQL SERVER Flex alpha database client configured") } -// Schema defines the schema for the data source. -func (r *userDataSource) Schema(_ context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) { - descriptions := map[string]string{ - "main": "SQLServer Flex user data source schema. Must have a `region` specified in the provider configuration.", - "id": "Terraform's internal data source. ID. It is structured as \"`project_id`,`region`,`instance_id`,`user_id`\".", - "user_id": "User ID.", - "instance_id": "ID of the SQLServer Flex instance.", - "project_id": "STACKIT project ID to which the instance is associated.", - "username": "Username of the SQLServer Flex instance.", - "roles": "Database access levels for the user.", - "password": "Password of the user account.", - "region": "The resource region. If not defined, the provider region is used.", - "status": "Status of the user.", - "default_database": "Default database of the user.", - } - - resp.Schema = schema.Schema{ - Description: descriptions["main"], - Attributes: map[string]schema.Attribute{ - "id": schema.StringAttribute{ - Description: descriptions["id"], - Computed: true, - }, - "user_id": schema.Int64Attribute{ - Description: descriptions["user_id"], - Required: true, - Validators: []validator.Int64{ - int64validator.AtLeast(1), - }, - }, - "instance_id": schema.StringAttribute{ - Description: descriptions["instance_id"], - Required: true, - Validators: []validator.String{ - validate.UUID(), - validate.NoSeparator(), - }, - }, - "project_id": schema.StringAttribute{ - Description: descriptions["project_id"], - Required: true, - Validators: []validator.String{ - validate.UUID(), - validate.NoSeparator(), - }, - }, - "username": schema.StringAttribute{ - Description: descriptions["username"], - Computed: true, - }, - "roles": schema.SetAttribute{ - Description: descriptions["roles"], - ElementType: types.StringType, - Computed: true, - }, - "host": schema.StringAttribute{ - Computed: true, - }, - "port": schema.Int64Attribute{ - Computed: true, - }, - "region": schema.StringAttribute{ - // the region cannot be found automatically, so it has to be passed - Optional: true, - Description: descriptions["region"], - }, - "status": schema.StringAttribute{ - Computed: true, - }, - "default_database": schema.StringAttribute{ - Computed: true, - }, - }, - } -} - -// Read refreshes the Terraform state with the latest data. -func (r *userDataSource) Read( - ctx context.Context, - req datasource.ReadRequest, - resp *datasource.ReadResponse, -) { // nolint:gocritic // function signature required by Terraform +func (d *userDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { var model dataSourceModel diags := req.Config.Get(ctx, &model) resp.Diagnostics.Append(diags...) @@ -174,13 +90,13 @@ func (r *userDataSource) Read( projectId := model.ProjectId.ValueString() instanceId := model.InstanceId.ValueString() userId := model.UserId.ValueInt64() - region := r.providerData.GetRegionWithOverride(model.Region) + region := d.providerData.GetRegionWithOverride(model.Region) ctx = tflog.SetField(ctx, "project_id", projectId) ctx = tflog.SetField(ctx, "instance_id", instanceId) ctx = tflog.SetField(ctx, "user_id", userId) ctx = tflog.SetField(ctx, "region", region) - recordSetResp, err := r.client.GetUserRequest(ctx, projectId, region, instanceId, userId).Execute() + recordSetResp, err := d.client.GetUserRequest(ctx, projectId, region, instanceId, userId).Execute() if err != nil { utils.LogError( ctx, @@ -221,5 +137,5 @@ func (r *userDataSource) Read( if resp.Diagnostics.HasError() { return } - tflog.Info(ctx, "SQLServer Flex alpha instance read") + tflog.Info(ctx, "SQLServer Flex beta instance read") } diff --git a/stackit/internal/services/sqlserverflexalpha/user/mapper.go b/stackit/internal/services/sqlserverflexalpha/user/mapper.go index 398011be..515c9ddc 100644 --- a/stackit/internal/services/sqlserverflexalpha/user/mapper.go +++ b/stackit/internal/services/sqlserverflexalpha/user/mapper.go @@ -42,7 +42,7 @@ func mapDataSourceFields(userResp *sqlserverflexalpha.GetUserResponse, model *da // Map roles if user.Roles == nil { - model.Roles = types.SetNull(types.StringType) + model.Roles = types.List(types.SetNull(types.StringType)) } else { var roles []attr.Value for _, role := range *user.Roles { @@ -52,7 +52,7 @@ func mapDataSourceFields(userResp *sqlserverflexalpha.GetUserResponse, model *da if diags.HasError() { return fmt.Errorf("failed to map roles: %w", core.DiagsToError(diags)) } - model.Roles = rolesSet + model.Roles = types.List(rolesSet) } // Set remaining attributes @@ -141,7 +141,7 @@ func mapFieldsCreate(userResp *sqlserverflexalpha.CreateUserResponse, model *res if user.Roles != nil { var roles []attr.Value for _, role := range *user.Roles { - roles = append(roles, types.StringValue(role)) + roles = append(roles, types.StringValue(string(role))) } rolesSet, diags := types.SetValue(types.StringType, roles) if diags.HasError() { @@ -154,6 +154,9 @@ func mapFieldsCreate(userResp *sqlserverflexalpha.CreateUserResponse, model *res model.Roles = types.List(types.SetNull(types.StringType)) } + model.Password = types.StringPointerValue(user.Password) + model.Uri = types.StringPointerValue(user.Uri) + model.Host = types.StringPointerValue(user.Host) model.Port = types.Int64PointerValue(user.Port) model.Region = types.StringValue(region) diff --git a/stackit/internal/services/sqlserverflexalpha/user/mapper_test.go b/stackit/internal/services/sqlserverflexalpha/user/mapper_test.go index cb4e39f3..f981eb0e 100644 --- a/stackit/internal/services/sqlserverflexalpha/user/mapper_test.go +++ b/stackit/internal/services/sqlserverflexalpha/user/mapper_test.go @@ -30,7 +30,7 @@ func TestMapDataSourceFields(t *testing.T) { InstanceId: types.StringValue("iid"), ProjectId: types.StringValue("pid"), Username: types.StringNull(), - Roles: types.SetNull(types.StringType), + Roles: types.List(types.SetNull(types.StringType)), Host: types.StringNull(), Port: types.Int64Null(), Region: types.StringValue(testRegion), @@ -60,12 +60,14 @@ func TestMapDataSourceFields(t *testing.T) { InstanceId: types.StringValue("iid"), ProjectId: types.StringValue("pid"), Username: types.StringValue("username"), - Roles: types.SetValueMust( - types.StringType, []attr.Value{ - types.StringValue("role_1"), - types.StringValue("role_2"), - types.StringValue(""), - }, + Roles: types.List( + types.SetValueMust( + types.StringType, []attr.Value{ + types.StringValue("role_1"), + types.StringValue("role_2"), + types.StringValue(""), + }, + ), ), Host: types.StringValue("host"), Port: types.Int64Value(1234), @@ -91,7 +93,7 @@ func TestMapDataSourceFields(t *testing.T) { InstanceId: types.StringValue("iid"), ProjectId: types.StringValue("pid"), Username: types.StringNull(), - Roles: types.SetValueMust(types.StringType, []attr.Value{}), + Roles: types.List(types.SetValueMust(types.StringType, []attr.Value{})), Host: types.StringNull(), Port: types.Int64Value(2123456789), Region: types.StringValue(testRegion), diff --git a/stackit/internal/services/sqlserverflexalpha/user/planModifiers.yaml b/stackit/internal/services/sqlserverflexalpha/user/planModifiers.yaml index b01aae98..8ff346ab 100644 --- a/stackit/internal/services/sqlserverflexalpha/user/planModifiers.yaml +++ b/stackit/internal/services/sqlserverflexalpha/user/planModifiers.yaml @@ -3,49 +3,40 @@ fields: modifiers: - 'UseStateForUnknown' - - name: 'user_id' - modifiers: - - 'UseStateForUnknown' - - name: 'instance_id' validators: - validate.NoSeparator - validate.UUID modifiers: + - 'UseStateForUnknown' - 'RequiresReplace' - name: 'project_id' validators: - validate.NoSeparator - validate.UUID - modifiers: - - 'RequiresReplace' - - - name: 'username' - modifiers: - - 'RequiresReplace' - - - name: 'roles' - modifiers: - - 'RequiresReplace' - - - name: 'password' - modifiers: - - 'UseStateForUnknown' - - - name: 'host' - modifiers: - - 'UseStateForUnknown' - - - name: 'port' modifiers: - 'UseStateForUnknown' + - 'RequiresReplace' - name: 'region' modifiers: - 'RequiresReplace' - - name: 'status' + - name: 'user_id' + modifiers: + - 'UseStateForUnknown' + - 'RequiresReplace' + + - name: 'username' + modifiers: + - 'UseStateForUnknown' + + - name: 'roles' + modifiers: + - 'UseStateForUnknown' + + - name: 'password' modifiers: - 'UseStateForUnknown' @@ -53,6 +44,6 @@ fields: modifiers: - 'UseStateForUnknown' - - name: 'default_database' + - name: 'status' modifiers: - 'UseStateForUnknown' diff --git a/stackit/internal/services/sqlserverflexalpha/user/resource.go b/stackit/internal/services/sqlserverflexalpha/user/resource.go index 3119a06c..2b98c94c 100644 --- a/stackit/internal/services/sqlserverflexalpha/user/resource.go +++ b/stackit/internal/services/sqlserverflexalpha/user/resource.go @@ -8,26 +8,27 @@ import ( "net/http" "strconv" "strings" - - "github.com/hashicorp/terraform-plugin-framework/resource/identityschema" - - "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha" - sqlserverflexalphagen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/user/resources_gen" - sqlserverflexalphaUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/utils" - sqlserverflexalphaWait "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/wait/sqlserverflexalpha" + "time" "github.com/hashicorp/terraform-plugin-framework/path" "github.com/hashicorp/terraform-plugin-framework/resource" + "github.com/hashicorp/terraform-plugin-framework/resource/identityschema" "github.com/hashicorp/terraform-plugin-framework/types" "github.com/hashicorp/terraform-plugin-log/tflog" "github.com/stackitcloud/stackit-sdk-go/core/oapierror" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" + sqlserverflexalphagen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/user/resources_gen" + sqlserverflexalphaUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/utils" + sqlserverflexalphaWait "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/wait/sqlserverflexalpha" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" + + sqlserverflexalphaResGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/user/resources_gen" ) -// Ensure the implementation satisfies the expected interfaces. var ( _ resource.Resource = &userResource{} _ resource.ResourceWithConfigure = &userResource{} @@ -36,13 +37,12 @@ var ( _ resource.ResourceWithIdentity = &userResource{} ) -// NewUserResource is a helper function to simplify the provider implementation. func NewUserResource() resource.Resource { return &userResource{} } // resourceModel describes the resource data model. -type resourceModel = sqlserverflexalphagen.UserModel +type resourceModel = sqlserverflexalphaResGen.UserModel // UserResourceIdentityModel describes the resource's identity attributes. type UserResourceIdentityModel struct { @@ -52,14 +52,12 @@ type UserResourceIdentityModel struct { UserID types.Int64 `tfsdk:"user_id"` } -// userResource is the resource implementation. type userResource struct { client *sqlserverflexalpha.APIClient providerData core.ProviderData } -// Metadata returns the resource type name. -func (r *userResource) Metadata(_ context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) { +func (r *userResource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) { resp.TypeName = req.ProviderTypeName + "_sqlserverflexalpha_user" } @@ -76,7 +74,7 @@ func (r *userResource) Configure(ctx context.Context, req resource.ConfigureRequ return } r.client = apiClient - tflog.Info(ctx, "SQLServer Alpha Flex user client configured") + tflog.Info(ctx, "SQLServer Beta Flex user client configured") } // ModifyPlan implements resource.ResourceWithModifyPlan. @@ -219,6 +217,7 @@ func (r *userResource) Create( ) return } + userId := *userResp.Id ctx = tflog.SetField(ctx, "user_id", userId) @@ -227,14 +226,13 @@ func (r *userResource) Create( ProjectID: types.StringValue(projectId), Region: types.StringValue(region), InstanceID: types.StringValue(instanceId), - UserID: types.Int64Value(userResp.GetId()), + UserID: types.Int64Value(userId), } resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...) if resp.Diagnostics.HasError() { return } - // Map response body to schema err = mapFieldsCreate(userResp, &model, region) if err != nil { core.LogAndAddError( @@ -245,6 +243,51 @@ func (r *userResource) Create( ) return } + + waitResp, err := sqlserverflexalphaWait.CreateUserWaitHandler( + ctx, + r.client, + projectId, + instanceId, + region, + userId, + ).SetSleepBeforeWait( + 90 * time.Second, + ).SetTimeout( + 90 * time.Minute, + ).WaitWithContext(ctx) + + if err != nil { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + "create user", + fmt.Sprintf("Instance creation waiting: %v", err), + ) + return + } + + if waitResp.Id == nil { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + "create user", + "Instance creation waiting: returned id is nil", + ) + return + } + + // Map response body to schema + err = mapFields(waitResp, &model, region) + if err != nil { + core.LogAndAddError( + ctx, + &resp.Diagnostics, + "Error creating user", + fmt.Sprintf("Processing API payload: %v", err), + ) + return + } // Set state to fully populated data diags = resp.State.Set(ctx, model) resp.Diagnostics.Append(diags...) @@ -296,18 +339,6 @@ func (r *userResource) Read( ctx = core.LogResponse(ctx) - // Set data returned by API in identity - identity := UserResourceIdentityModel{ - ProjectID: types.StringValue(projectId), - Region: types.StringValue(region), - InstanceID: types.StringValue(instanceId), - UserID: types.Int64Value(userId), - } - resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...) - if resp.Diagnostics.HasError() { - return - } - // Map response body to schema err = mapFields(recordSetResp, &model, region) if err != nil { @@ -320,6 +351,18 @@ func (r *userResource) Read( return } + // Set data returned by API in identity + identity := UserResourceIdentityModel{ + ProjectID: types.StringValue(projectId), + Region: types.StringValue(region), + InstanceID: types.StringValue(instanceId), + UserID: types.Int64Value(userId), + } + resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...) + if resp.Diagnostics.HasError() { + return + } + // Set refreshed state diags = resp.State.Set(ctx, model) resp.Diagnostics.Append(diags...) @@ -366,9 +409,29 @@ func (r *userResource) Delete( // Delete existing record set // err := r.client.DeleteUserRequest(ctx, projectId, region, instanceId, userId).Execute() + err := r.client.DeleteUserRequestExecute(ctx, projectId, region, instanceId, userId) + if err != nil { + var oapiErr *oapierror.GenericOpenAPIError + ok := errors.As(err, &oapiErr) + if !ok { + // TODO err handling + return + } + switch oapiErr.StatusCode { + case http.StatusNotFound: + resp.State.RemoveResource(ctx) + return + // case http.StatusInternalServerError: + // tflog.Warn(ctx, "[delete user] Wait handler got error 500") + // return false, nil, nil + default: + // TODO err handling + return + } + } // Delete existing record set - _, err := sqlserverflexalphaWait.DeleteUserWaitHandler(ctx, r.client, projectId, region, instanceId, userId). + _, err = sqlserverflexalphaWait.DeleteUserWaitHandler(ctx, r.client, projectId, region, instanceId, userId). WaitWithContext(ctx) // err := r.client.DeleteUserRequest(ctx, arg.projectId, arg.region, arg.instanceId, userId).Execute() if err != nil { @@ -377,6 +440,7 @@ func (r *userResource) Delete( } ctx = core.LogResponse(ctx) + resp.State.RemoveResource(ctx) tflog.Info(ctx, "SQLServer Flex user deleted") @@ -422,7 +486,7 @@ func (r *userResource) ImportState( resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), idParts[2])...) resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("user_id"), userId)...) - tflog.Info(ctx, "Postgres Flex user state imported") + tflog.Info(ctx, "SQLServer Flex user state imported") return } diff --git a/stackit/internal/wait/sqlserverflexalpha/wait.go b/stackit/internal/wait/sqlserverflexalpha/wait.go index fd9a1a4b..05fa5eb4 100644 --- a/stackit/internal/wait/sqlserverflexalpha/wait.go +++ b/stackit/internal/wait/sqlserverflexalpha/wait.go @@ -1,5 +1,3 @@ -// Copyright (c) STACKIT - package sqlserverflexalpha import ( @@ -28,145 +26,355 @@ const ( InstanceStateTerminating = "TERMINATING" ) -// APIClientInstanceInterface Interface needed for tests -type APIClientInstanceInterface interface { - GetInstanceRequestExecute(ctx context.Context, projectId, region, instanceId string) (*sqlserverflex.GetInstanceResponse, error) +// APIClientInterface Interface needed for tests +type APIClientInterface interface { + GetInstanceRequestExecute( + ctx context.Context, + projectId, region, instanceId string, + ) (*sqlserverflex.GetInstanceResponse, error) + GetDatabaseRequestExecute( + ctx context.Context, + projectId string, + region string, + instanceId string, + databaseName string, + ) (*sqlserverflex.GetDatabaseResponse, error) + GetUserRequestExecute( + ctx context.Context, + projectId string, + region string, + instanceId string, + userId int64, + ) (*sqlserverflex.GetUserResponse, error) + + ListRolesRequestExecute( + ctx context.Context, + projectId string, + region string, + instanceId string, + ) (*sqlserverflex.ListRolesResponse, error) + + ListUsersRequest(ctx context.Context, projectId string, region string, instanceId string) sqlserverflex.ApiListUsersRequestRequest + + ListUsersRequestExecute( + ctx context.Context, + projectId string, + region string, + instanceId string, + ) (*sqlserverflex.ListUserResponse, error) } // APIClientUserInterface Interface needed for tests type APIClientUserInterface interface { - DeleteUserRequestExecute(ctx context.Context, projectId string, region string, instanceId string, userId int64) error + DeleteUserRequestExecute( + ctx context.Context, + projectId string, + region string, + instanceId string, + userId int64, + ) error } // CreateInstanceWaitHandler will wait for instance creation -func CreateInstanceWaitHandler(ctx context.Context, a APIClientInstanceInterface, projectId, instanceId, region string) *wait.AsyncActionHandler[sqlserverflex.GetInstanceResponse] { - handler := wait.New(func() (waitFinished bool, response *sqlserverflex.GetInstanceResponse, err error) { - s, err := a.GetInstanceRequestExecute(ctx, projectId, region, instanceId) - if err != nil { - // TODO: catch 502, 503 and 504 - return false, nil, err - } - if s == nil || s.Id == nil || *s.Id != instanceId || s.Status == nil { - return false, nil, nil - } - switch strings.ToLower(string(*s.Status)) { - case strings.ToLower(InstanceStateSuccess): - if *s.Network.AccessScope == "SNA" { - if s.Network.InstanceAddress == nil { - tflog.Info(ctx, "Waiting for instance_address") - return false, nil, nil - } - if s.Network.RouterAddress == nil { - tflog.Info(ctx, "Waiting for router_address") - return false, nil, nil - } +func CreateInstanceWaitHandler( + ctx context.Context, + a APIClientInterface, + projectId, instanceId, region string, +) *wait.AsyncActionHandler[sqlserverflex.GetInstanceResponse] { + handler := wait.New( + func() (waitFinished bool, response *sqlserverflex.GetInstanceResponse, err error) { + s, err := a.GetInstanceRequestExecute(ctx, projectId, region, instanceId) + if err != nil { + return false, nil, err } - return true, s, nil - case strings.ToLower(InstanceStateUnknown), strings.ToLower(InstanceStateFailed): - return true, s, fmt.Errorf("create failed for instance with id %s", instanceId) - case strings.ToLower(InstanceStatePending), strings.ToLower(InstanceStateProcessing): - tflog.Info(ctx, "request is being handled", map[string]interface{}{ - "status": *s.Status, - }) - return false, nil, nil - default: - tflog.Info(ctx, "Wait (create) received unknown status", map[string]interface{}{ - "instanceId": instanceId, - "status": s.Status, - }) - return false, s, nil - } - }) + if s == nil || s.Id == nil || *s.Id != instanceId || s.Status == nil { + return false, nil, nil + } + switch strings.ToLower(string(*s.Status)) { + case strings.ToLower(InstanceStateSuccess): + if s.Network != nil && s.Network.AccessScope != nil && *s.Network.AccessScope == "SNA" { + if s.Network.InstanceAddress == nil { + tflog.Info(ctx, "Waiting for instance_address") + return false, nil, nil + } + if s.Network.RouterAddress == nil { + tflog.Info(ctx, "Waiting for router_address") + return false, nil, nil + } + } + + tflog.Info(ctx, "trying to get roles") + time.Sleep(10 * time.Second) + _, rolesErr := a.ListRolesRequestExecute(ctx, projectId, region, instanceId) + if rolesErr != nil { + var oapiErr *oapierror.GenericOpenAPIError + ok := errors.As(rolesErr, &oapiErr) + if !ok { + return false, nil, fmt.Errorf("could not convert error to oapierror.GenericOpenAPIError") + } + if oapiErr.StatusCode != http.StatusInternalServerError { + tflog.Info( + ctx, "got error from api", map[string]interface{}{ + "error": rolesErr.Error(), + }, + ) + return false, nil, rolesErr + } + tflog.Info( + ctx, "wait for get-roles to work hack", map[string]interface{}{}, + ) + time.Sleep(10 * time.Second) + return false, nil, nil + } + + tflog.Info(ctx, "trying to get users") + time.Sleep(10 * time.Second) + _, usersErr := a.ListUsersRequestExecute(ctx, projectId, region, instanceId) + if usersErr != nil { + var oapiErr *oapierror.GenericOpenAPIError + ok := errors.As(usersErr, &oapiErr) + if !ok { + return false, nil, fmt.Errorf("could not convert error to oapierror.GenericOpenAPIError") + } + if oapiErr.StatusCode != http.StatusInternalServerError { + tflog.Info( + ctx, "got error from api", map[string]interface{}{ + "error": rolesErr.Error(), + }, + ) + return false, nil, usersErr + } + tflog.Info( + ctx, "wait for get-users to work hack", map[string]interface{}{}, + ) + time.Sleep(10 * time.Second) + return false, nil, nil + } + return true, s, nil + case strings.ToLower(InstanceStateUnknown), strings.ToLower(InstanceStateFailed): + return true, nil, fmt.Errorf("create failed for instance with id %s", instanceId) + case strings.ToLower(InstanceStatePending), strings.ToLower(InstanceStateProcessing): + tflog.Info( + ctx, "request is being handled", map[string]interface{}{ + "status": *s.Status, + }, + ) + time.Sleep(10 * time.Second) + return false, nil, nil + default: + tflog.Info( + ctx, "Wait (create) received unknown status", map[string]interface{}{ + "instanceId": instanceId, + "status": s.Status, + }, + ) + return true, nil, errors.New("unknown status received") + } + }, + ) return handler } // UpdateInstanceWaitHandler will wait for instance update -func UpdateInstanceWaitHandler(ctx context.Context, a APIClientInstanceInterface, projectId, instanceId, region string) *wait.AsyncActionHandler[sqlserverflex.GetInstanceResponse] { - handler := wait.New(func() (waitFinished bool, response *sqlserverflex.GetInstanceResponse, err error) { - s, err := a.GetInstanceRequestExecute(ctx, projectId, region, instanceId) - if err != nil { - return false, nil, err - } - if s == nil || s.Id == nil || *s.Id != instanceId || s.Status == nil { - return false, nil, nil - } - switch strings.ToLower(string(*s.Status)) { - case strings.ToLower(InstanceStateSuccess): - return true, s, nil - case strings.ToLower(InstanceStateUnknown), strings.ToLower(InstanceStateFailed): - return true, s, fmt.Errorf("update failed for instance with id %s", instanceId) - case strings.ToLower(InstanceStatePending), strings.ToLower(InstanceStateProcessing): - tflog.Info(ctx, "request is being handled", map[string]interface{}{ - "status": *s.Status, - }) - return false, nil, nil - default: - tflog.Info(ctx, "Wait (update) received unknown status", map[string]interface{}{ - "instanceId": instanceId, - "status": s.Status, - }) - return false, s, nil - } - }) - handler.SetSleepBeforeWait(15 * time.Second) - handler.SetTimeout(45 * time.Minute) +func UpdateInstanceWaitHandler( + ctx context.Context, + a APIClientInterface, + projectId, instanceId, region string, +) *wait.AsyncActionHandler[sqlserverflex.GetInstanceResponse] { + handler := wait.New( + func() (waitFinished bool, response *sqlserverflex.GetInstanceResponse, err error) { + s, err := a.GetInstanceRequestExecute(ctx, projectId, region, instanceId) + if err != nil { + return false, nil, err + } + if s == nil || s.Id == nil || *s.Id != instanceId || s.Status == nil { + return false, nil, nil + } + switch strings.ToLower(string(*s.Status)) { + case strings.ToLower(InstanceStateSuccess): + return true, s, nil + case strings.ToLower(InstanceStateUnknown), strings.ToLower(InstanceStateFailed): + return true, s, fmt.Errorf("update failed for instance with id %s", instanceId) + case strings.ToLower(InstanceStatePending), strings.ToLower(InstanceStateProcessing): + tflog.Info( + ctx, "request is being handled", map[string]interface{}{ + "status": *s.Status, + }, + ) + return false, s, nil + default: + tflog.Info( + ctx, "Wait (update) received unknown status", map[string]interface{}{ + "instanceId": instanceId, + "status": s.Status, + }, + ) + return false, s, nil + } + }, + ) return handler } -// PartialUpdateInstanceWaitHandler will wait for instance update -func PartialUpdateInstanceWaitHandler(ctx context.Context, a APIClientInstanceInterface, projectId, instanceId, region string) *wait.AsyncActionHandler[sqlserverflex.GetInstanceResponse] { - return UpdateInstanceWaitHandler(ctx, a, projectId, instanceId, region) +// DeleteInstanceWaitHandler will wait for instance deletion +func DeleteInstanceWaitHandler( + ctx context.Context, + a APIClientInterface, + projectId, instanceId, region string, +) *wait.AsyncActionHandler[sqlserverflex.GetInstanceResponse] { + handler := wait.New( + func() (waitFinished bool, response *sqlserverflex.GetInstanceResponse, err error) { + s, err := a.GetInstanceRequestExecute(ctx, projectId, region, instanceId) + if err == nil { + return false, s, nil + } + var oapiErr *oapierror.GenericOpenAPIError + ok := errors.As(err, &oapiErr) + if !ok { + return false, nil, fmt.Errorf("could not convert error to oapierror.GenericOpenAPIError") + } + if oapiErr.StatusCode != http.StatusNotFound { + return false, nil, err + } + return true, nil, nil + }, + ) + handler.SetTimeout(30 * time.Minute) + return handler } -// DeleteInstanceWaitHandler will wait for instance deletion -func DeleteInstanceWaitHandler(ctx context.Context, a APIClientInstanceInterface, projectId, instanceId, region string) *wait.AsyncActionHandler[struct{}] { - handler := wait.New(func() (waitFinished bool, response *struct{}, err error) { - _, err = a.GetInstanceRequestExecute(ctx, projectId, region, instanceId) - if err == nil { +// CreateDatabaseWaitHandler will wait for instance creation +func CreateDatabaseWaitHandler( + ctx context.Context, + a APIClientInterface, + projectId, instanceId, region, databaseName string, +) *wait.AsyncActionHandler[sqlserverflex.GetDatabaseResponse] { + handler := wait.New( + func() (waitFinished bool, response *sqlserverflex.GetDatabaseResponse, err error) { + s, err := a.GetDatabaseRequestExecute(ctx, projectId, region, instanceId, databaseName) + if err != nil { + var oapiErr *oapierror.GenericOpenAPIError + ok := errors.As(err, &oapiErr) + if !ok { + return false, nil, fmt.Errorf("get database - could not convert error to oapierror.GenericOpenAPIError: %s", err.Error()) + } + if oapiErr.StatusCode != http.StatusNotFound { + return false, nil, err + } + return false, nil, nil + } + if s == nil || s.Name == nil || *s.Name != databaseName { + return false, nil, errors.New("response did return different result") + } + return true, s, nil + }, + ) + return handler +} + +// CreateUserWaitHandler will wait for instance creation +func CreateUserWaitHandler( + ctx context.Context, + a APIClientInterface, + projectId, instanceId, region string, + userId int64, +) *wait.AsyncActionHandler[sqlserverflex.GetUserResponse] { + handler := wait.New( + func() (waitFinished bool, response *sqlserverflex.GetUserResponse, err error) { + s, err := a.GetUserRequestExecute(ctx, projectId, region, instanceId, userId) + if err != nil { + var oapiErr *oapierror.GenericOpenAPIError + ok := errors.As(err, &oapiErr) + if !ok { + return false, nil, fmt.Errorf("could not convert error to oapierror.GenericOpenAPIError") + } + if oapiErr.StatusCode != http.StatusNotFound { + return false, nil, err + } + return false, nil, nil + } + return true, s, nil + }, + ) + return handler +} + +// WaitForUserWaitHandler will wait for instance creation +func WaitForUserWaitHandler( + ctx context.Context, + a APIClientInterface, + projectId, instanceId, region, userName string, +) *wait.AsyncActionHandler[sqlserverflex.ListUserResponse] { + startTime := time.Now() + timeOut := 2 * time.Minute + + handler := wait.New( + func() (waitFinished bool, response *sqlserverflex.ListUserResponse, err error) { + if time.Since(startTime) > timeOut { + return false, nil, errors.New("ran into timeout") + } + s, err := a.ListUsersRequest(ctx, projectId, region, instanceId).Size(100).Execute() + if err != nil { + var oapiErr *oapierror.GenericOpenAPIError + ok := errors.As(err, &oapiErr) + if !ok { + return false, nil, fmt.Errorf("Wait (list users) could not convert error to oapierror.GenericOpenAPIError: %s", err.Error()) + } + if oapiErr.StatusCode != http.StatusNotFound { + return false, nil, err + } + tflog.Info( + ctx, "Wait (list users) still waiting", map[string]interface{}{}, + ) + + return false, nil, nil + } + users, ok := s.GetUsersOk() + if !ok { + return false, nil, errors.New("no users found") + } + + for _, u := range users { + if u.GetUsername() == userName { + return true, s, nil + } + } + tflog.Info( + ctx, "Wait (list users) user still not present", map[string]interface{}{}, + ) return false, nil, nil - } - var oapiErr *oapierror.GenericOpenAPIError - ok := errors.As(err, &oapiErr) - if !ok { - return false, nil, fmt.Errorf("could not convert error to oapierror.GenericOpenAPIError") - } - if oapiErr.StatusCode != http.StatusNotFound { - return false, nil, err - } - return true, nil, nil - }) - handler.SetTimeout(15 * time.Minute) + }, + ) return handler } // DeleteUserWaitHandler will wait for instance deletion func DeleteUserWaitHandler( ctx context.Context, - a APIClientUserInterface, - projectId, instanceId, region string, + a APIClientInterface, + projectId, region, instanceId string, userId int64, ) *wait.AsyncActionHandler[struct{}] { - handler := wait.New(func() (waitFinished bool, response *struct{}, err error) { - err = a.DeleteUserRequestExecute(ctx, projectId, region, instanceId, userId) - if err == nil { - return false, nil, nil - } - var oapiErr *oapierror.GenericOpenAPIError - ok := errors.As(err, &oapiErr) - if !ok { - return false, nil, fmt.Errorf("could not convert error to oapierror.GenericOpenAPIError") - } + handler := wait.New( + func() (waitFinished bool, response *struct{}, err error) { + _, err = a.GetUserRequestExecute(ctx, projectId, region, instanceId, userId) + if err == nil { + return false, nil, nil + } + var oapiErr *oapierror.GenericOpenAPIError + ok := errors.As(err, &oapiErr) + if !ok { + return false, nil, fmt.Errorf("could not convert error to oapierror.GenericOpenAPIError") + } - switch oapiErr.StatusCode { - case http.StatusNotFound: - return true, nil, nil - case http.StatusInternalServerError: - tflog.Warn(ctx, "Wait handler got error 500") - return false, nil, nil - default: - return false, nil, err - } - }) + switch oapiErr.StatusCode { + case http.StatusNotFound: + return true, nil, nil + default: + return false, nil, err + } + }, + ) handler.SetTimeout(15 * time.Minute) handler.SetSleepBeforeWait(15 * time.Second) return handler diff --git a/stackit/internal/wait/sqlserverflexalpha/wait_test.go b/stackit/internal/wait/sqlserverflexalpha/wait_test.go index 85cd8318..4c85e436 100644 --- a/stackit/internal/wait/sqlserverflexalpha/wait_test.go +++ b/stackit/internal/wait/sqlserverflexalpha/wait_test.go @@ -1,9 +1,8 @@ -// Copyright (c) STACKIT - package sqlserverflexalpha import ( "context" + "reflect" "testing" "time" @@ -23,7 +22,81 @@ type apiClientInstanceMocked struct { instanceGetFails bool } -func (a *apiClientInstanceMocked) GetInstanceRequestExecute(_ context.Context, _, _, _ string) (*sqlserverflex.GetInstanceResponse, error) { +type ListUsersRequestRequest struct{} + +func (l ListUsersRequestRequest) Page(_ int64) sqlserverflex.ApiListUsersRequestRequest { + return l +} + +func (l ListUsersRequestRequest) Size(_ int64) sqlserverflex.ApiListUsersRequestRequest { + return l +} + +func (l ListUsersRequestRequest) Sort(_ sqlserverflex.UserSort) sqlserverflex.ApiListUsersRequestRequest { + return l +} + +func (l ListUsersRequestRequest) Execute() (*sqlserverflex.ListUserResponse, error) { + // TODO implement me + panic("implement me") +} + +func (a *apiClientInstanceMocked) ListUsersRequest( + _ context.Context, + _ string, + _ string, + _ string, +) sqlserverflex.ApiListUsersRequestRequest { + return ListUsersRequestRequest{} +} + +func (a *apiClientInstanceMocked) ListRolesRequestExecute( + _ context.Context, + _ string, + _ string, + _ string, +) (*sqlserverflex.ListRolesResponse, error) { + return &sqlserverflex.ListRolesResponse{ + Roles: &[]string{}, + }, nil +} + +func (a *apiClientInstanceMocked) ListUsersRequestExecute( + _ context.Context, + _ string, + _ string, + _ string, +) (*sqlserverflex.ListUserResponse, error) { + return &sqlserverflex.ListUserResponse{ + Pagination: nil, + Users: nil, + }, nil +} + +func (a *apiClientInstanceMocked) GetDatabaseRequestExecute( + _ context.Context, + _ string, + _ string, + _ string, + _ string, +) (*sqlserverflex.GetDatabaseResponse, error) { + return nil, nil +} + +func (a *apiClientInstanceMocked) GetUserRequestExecute( + _ context.Context, + _ string, + _ string, + _ string, + _ int64, +) (*sqlserverflex.GetUserResponse, error) { + return nil, nil +} + +func (a *apiClientInstanceMocked) GetInstanceRequestExecute( + _ context.Context, + _, _, _ string, +) (*sqlserverflex.GetInstanceResponse, error) { if a.instanceGetFails { return nil, &oapierror.GenericOpenAPIError{ StatusCode: 500, @@ -43,9 +116,11 @@ func (a *apiClientInstanceMocked) GetInstanceRequestExecute(_ context.Context, _ }, nil } func TestCreateInstanceWaitHandler(t *testing.T) { - t.Skip("skipping - needs refactoring") + //stateSuccess := utils.Ptr(InstanceStateSuccess) + instanceId := utils.Ptr("foo") tests := []struct { desc string + instanceId string instanceGetFails bool instanceState string instanceNetwork sqlserverflex.InstanceNetwork @@ -53,40 +128,42 @@ func TestCreateInstanceWaitHandler(t *testing.T) { wantErr bool wantRes *sqlserverflex.GetInstanceResponse }{ - { - desc: "create_succeeded", - instanceGetFails: false, - instanceState: InstanceStateSuccess, - instanceNetwork: sqlserverflex.InstanceNetwork{ - AccessScope: nil, - Acl: nil, - InstanceAddress: utils.Ptr("10.0.0.1"), - RouterAddress: utils.Ptr("10.0.0.2"), - }, - wantErr: false, - wantRes: &sqlserverflex.GetInstanceResponse{ - BackupSchedule: nil, - Edition: nil, - Encryption: nil, - FlavorId: nil, - Id: nil, - IsDeletable: nil, - Name: nil, - Network: &sqlserverflex.InstanceNetwork{ - AccessScope: nil, - Acl: nil, - InstanceAddress: utils.Ptr("10.0.0.1"), - RouterAddress: utils.Ptr("10.0.0.2"), - }, - Replicas: nil, - RetentionDays: nil, - Status: nil, - Storage: nil, - Version: nil, - }, - }, + //{ + // desc: "create_succeeded", + // instanceId: *instanceId, + // instanceGetFails: false, + // instanceState: *stateSuccess, + // instanceNetwork: sqlserverflex.InstanceNetwork{ + // AccessScope: nil, + // Acl: nil, + // InstanceAddress: utils.Ptr("10.0.0.1"), + // RouterAddress: utils.Ptr("10.0.0.2"), + // }, + // wantErr: false, + // wantRes: &sqlserverflex.GetInstanceResponse{ + // BackupSchedule: nil, + // Edition: nil, + // Encryption: nil, + // FlavorId: nil, + // Id: instanceId, + // IsDeletable: nil, + // Name: nil, + // Network: &sqlserverflex.InstanceNetwork{ + // AccessScope: nil, + // Acl: nil, + // InstanceAddress: utils.Ptr("10.0.0.1"), + // RouterAddress: utils.Ptr("10.0.0.2"), + // }, + // Replicas: nil, + // RetentionDays: nil, + // Status: sqlserverflex.GetInstanceResponseGetStatusAttributeType(stateSuccess), + // Storage: nil, + // Version: nil, + // }, + //}, { desc: "create_failed", + instanceId: *instanceId, instanceGetFails: false, instanceState: InstanceStateFailed, wantErr: true, @@ -94,6 +171,7 @@ func TestCreateInstanceWaitHandler(t *testing.T) { }, { desc: "create_failed_2", + instanceId: *instanceId, instanceGetFails: false, instanceState: InstanceStateEmpty, wantErr: true, @@ -101,12 +179,14 @@ func TestCreateInstanceWaitHandler(t *testing.T) { }, { desc: "instance_get_fails", + instanceId: *instanceId, instanceGetFails: true, wantErr: true, wantRes: nil, }, { desc: "timeout", + instanceId: *instanceId, instanceGetFails: false, instanceState: InstanceStateProcessing, wantErr: true, @@ -114,26 +194,26 @@ func TestCreateInstanceWaitHandler(t *testing.T) { }, } for _, tt := range tests { - t.Run(tt.desc, func(t *testing.T) { - instanceId := "foo-bar" + t.Run( + tt.desc, func(t *testing.T) { + apiClient := &apiClientInstanceMocked{ + instanceId: tt.instanceId, + instanceState: tt.instanceState, + instanceGetFails: tt.instanceGetFails, + } - apiClient := &apiClientInstanceMocked{ - instanceId: instanceId, - instanceState: tt.instanceState, - instanceGetFails: tt.instanceGetFails, - } + handler := CreateInstanceWaitHandler(context.Background(), apiClient, "", tt.instanceId, "") - handler := CreateInstanceWaitHandler(context.Background(), apiClient, "", instanceId, "") + gotRes, err := handler.SetTimeout(10 * time.Millisecond).SetSleepBeforeWait(1 * time.Millisecond).WaitWithContext(context.Background()) + if (err != nil) != tt.wantErr { + t.Fatalf("handler error = %v, wantErr %v", err, tt.wantErr) + } - gotRes, err := handler.SetTimeout(10 * time.Millisecond).SetSleepBeforeWait(1 * time.Millisecond).WaitWithContext(context.Background()) - if (err != nil) != tt.wantErr { - t.Fatalf("handler error = %v, wantErr %v", err, tt.wantErr) - } - - if !cmp.Equal(gotRes, tt.wantRes) { - t.Fatalf("handler gotRes = %v, want %v", gotRes, tt.wantRes) - } - }) + if !reflect.DeepEqual(gotRes, tt.wantRes) { + t.Fatalf("handler gotRes = %v, want %v", gotRes, tt.wantRes) + } + }, + ) } } @@ -182,34 +262,36 @@ func TestUpdateInstanceWaitHandler(t *testing.T) { }, } for _, tt := range tests { - t.Run(tt.desc, func(t *testing.T) { - instanceId := "foo-bar" + t.Run( + tt.desc, func(t *testing.T) { + instanceId := "foo-bar" - apiClient := &apiClientInstanceMocked{ - instanceId: instanceId, - instanceState: tt.instanceState, - instanceGetFails: tt.instanceGetFails, - } - - var wantRes *sqlserverflex.GetInstanceResponse - if tt.wantResp { - wantRes = &sqlserverflex.GetInstanceResponse{ - Id: &instanceId, - Status: sqlserverflex.GetInstanceResponseGetStatusAttributeType(utils.Ptr(tt.instanceState)), + apiClient := &apiClientInstanceMocked{ + instanceId: instanceId, + instanceState: tt.instanceState, + instanceGetFails: tt.instanceGetFails, } - } - handler := UpdateInstanceWaitHandler(context.Background(), apiClient, "", instanceId, "") + var wantRes *sqlserverflex.GetInstanceResponse + if tt.wantResp { + wantRes = &sqlserverflex.GetInstanceResponse{ + Id: &instanceId, + Status: sqlserverflex.GetInstanceResponseGetStatusAttributeType(utils.Ptr(tt.instanceState)), + } + } - gotRes, err := handler.SetTimeout(10 * time.Millisecond).SetSleepBeforeWait(1 * time.Millisecond).WaitWithContext(context.Background()) + handler := UpdateInstanceWaitHandler(context.Background(), apiClient, "", instanceId, "") - if (err != nil) != tt.wantErr { - t.Fatalf("handler error = %v, wantErr %v", err, tt.wantErr) - } - if !cmp.Equal(gotRes, wantRes) { - t.Fatalf("handler gotRes = %v, want %v", gotRes, wantRes) - } - }) + gotRes, err := handler.SetTimeout(10 * time.Millisecond).SetSleepBeforeWait(1 * time.Millisecond).WaitWithContext(context.Background()) + + if (err != nil) != tt.wantErr { + t.Fatalf("handler error = %v, wantErr %v", err, tt.wantErr) + } + if !cmp.Equal(gotRes, wantRes) { + t.Fatalf("handler gotRes = %v, want %v", gotRes, wantRes) + } + }, + ) } } @@ -239,23 +321,25 @@ func TestDeleteInstanceWaitHandler(t *testing.T) { }, } for _, tt := range tests { - t.Run(tt.desc, func(t *testing.T) { - instanceId := "foo-bar" + t.Run( + tt.desc, func(t *testing.T) { + instanceId := "foo-bar" - apiClient := &apiClientInstanceMocked{ - instanceGetFails: tt.instanceGetFails, - instanceIsDeleted: tt.instanceState == InstanceStateSuccess, - instanceId: instanceId, - instanceState: tt.instanceState, - } + apiClient := &apiClientInstanceMocked{ + instanceGetFails: tt.instanceGetFails, + instanceIsDeleted: tt.instanceState == InstanceStateSuccess, + instanceId: instanceId, + instanceState: tt.instanceState, + } - handler := DeleteInstanceWaitHandler(context.Background(), apiClient, "", instanceId, "") + handler := DeleteInstanceWaitHandler(context.Background(), apiClient, "", instanceId, "") - _, err := handler.SetTimeout(10 * time.Millisecond).WaitWithContext(context.Background()) + _, err := handler.SetTimeout(10 * time.Millisecond).WaitWithContext(context.Background()) - if (err != nil) != tt.wantErr { - t.Fatalf("handler error = %v, wantErr %v", err, tt.wantErr) - } - }) + if (err != nil) != tt.wantErr { + t.Fatalf("handler error = %v, wantErr %v", err, tt.wantErr) + } + }, + ) } } From 452f73877f3db17b470cfc0d537a36054e3c9257 Mon Sep 17 00:00:00 2001 From: "Marcel S. Henselin" Date: Fri, 13 Feb 2026 17:06:57 +0000 Subject: [PATCH 28/41] fix: fix pgsql db state (#62) ## Description relates to #1234 ## Checklist - [ ] Issue was linked above - [ ] Code format was applied: `make fmt` - [ ] Examples were added / adjusted (see `examples/` directory) - [x] Docs are up-to-date: `make generate-docs` (will be checked by CI) - [ ] Unit tests got implemented or updated - [ ] Acceptance tests got implemented or updated (see e.g. [here](https://github.com/stackitcloud/terraform-provider-stackit/blob/f5f99d170996b208672ae684b6da53420e369563/stackit/internal/services/dns/dns_acc_test.go)) - [x] Unit tests are passing: `make test` (will be checked by CI) - [x] No linter issues: `make lint` (will be checked by CI) Reviewed-on: https://tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pulls/62 Co-authored-by: Marcel S. Henselin Co-committed-by: Marcel S. Henselin --- .../internal/services/postgresflexalpha/database/resource.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stackit/internal/services/postgresflexalpha/database/resource.go b/stackit/internal/services/postgresflexalpha/database/resource.go index d88b8c8d..fd3f225c 100644 --- a/stackit/internal/services/postgresflexalpha/database/resource.go +++ b/stackit/internal/services/postgresflexalpha/database/resource.go @@ -179,7 +179,7 @@ func (r *databaseResource) Create( ctx = core.InitProviderContext(ctx) projectId := model.ProjectId.ValueString() - region := model.InstanceId.ValueString() + region := model.Region.ValueString() instanceId := model.InstanceId.ValueString() ctx = tflog.SetField(ctx, "project_id", projectId) From 43223f5d1f3b6e1b6a1e037032afbf1006275897 Mon Sep 17 00:00:00 2001 From: "Marcel S. Henselin" Date: Mon, 16 Feb 2026 09:04:16 +0000 Subject: [PATCH 29/41] fix: #63 sort user roles to prevent state change (#65) fix: include recent api changes Reviewed-on: https://tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pulls/65 Co-authored-by: Marcel S. Henselin Co-committed-by: Marcel S. Henselin --- .gitignore | 2 + .../sqlserverflexalpha_database.md | 2 +- .../data-sources/sqlserverflexalpha_flavor.md | 12 ++--- .../sqlserverflexalpha_instance.md | 1 - docs/data-sources/sqlserverflexalpha_user.md | 44 ++++++++++++----- docs/resources/postgresflexalpha_user.md | 1 - sample/postgres/postresql.tf | 8 ++-- .../postgresflexalpha/user/resource.go | 48 ++++++++++++++++--- .../user/resources_gen/user_resource_gen.go | 24 ++++------ .../sqlserverflexalpha/user/mapper.go | 16 +++++-- .../sqlserverflexalpha/user/resource.go | 47 ++++++++++++++++-- .../services/sqlserverflexbeta/user/mapper.go | 12 ++++- .../sqlserverflexbeta/user/resource.go | 48 ++++++++++++++++--- 13 files changed, 202 insertions(+), 63 deletions(-) diff --git a/.gitignore b/.gitignore index c588a0f1..1dac2ea3 100644 --- a/.gitignore +++ b/.gitignore @@ -46,3 +46,5 @@ dist pkg_gen /release/ +.env +**/.env diff --git a/docs/data-sources/sqlserverflexalpha_database.md b/docs/data-sources/sqlserverflexalpha_database.md index 20e8a653..df66ffb7 100644 --- a/docs/data-sources/sqlserverflexalpha_database.md +++ b/docs/data-sources/sqlserverflexalpha_database.md @@ -26,7 +26,7 @@ description: |- - `collation_name` (String) The collation of the database. This database collation should match the *collation_name* of one of the collations given by the **Get database collation list** endpoint. - `compatibility_level` (Number) CompatibilityLevel of the Database. -- `id` (String) Terraform's internal resource ID. It is structured as \"`project_id`,`region`,`instance_id`,`database_id`\".", +- `id` (String) The terraform internal identifier. - `name` (String) The name of the database. - `owner` (String) The owner of the database. - `tf_original_api_id` (Number) The id of the database. diff --git a/docs/data-sources/sqlserverflexalpha_flavor.md b/docs/data-sources/sqlserverflexalpha_flavor.md index 0dfc1fd2..7a03ecfb 100644 --- a/docs/data-sources/sqlserverflexalpha_flavor.md +++ b/docs/data-sources/sqlserverflexalpha_flavor.md @@ -29,20 +29,20 @@ data "stackitprivatepreview_sqlserverflexalpha_flavor" "flavor" { ### Required - `cpu` (Number) The cpu count of the instance. -- `node_type` (String) defines the nodeType it can be either single or replica -- `project_id` (String) The cpu count of the instance. +- `node_type` (String) defines the nodeType it can be either single or HA +- `project_id` (String) The project ID of the flavor. - `ram` (Number) The memory of the instance in Gibibyte. -- `region` (String) The flavor description. +- `region` (String) The region of the flavor. - `storage_class` (String) The memory of the instance in Gibibyte. ### Read-Only - `description` (String) The flavor description. -- `flavor_id` (String) The flavor id of the instance flavor. -- `id` (String) The terraform id of the instance flavor. +- `flavor_id` (String) The id of the instance flavor. +- `id` (String) The id of the instance flavor. - `max_gb` (Number) maximum storage which can be ordered for the flavor in Gigabyte. - `min_gb` (Number) minimum storage which is required to order in Gigabyte. -- `storage_classes` (Attributes List) (see [below for nested schema](#nestedatt--storage_classes)) +- `storage_classes` (Attributes List) maximum storage which can be ordered for the flavor in Gigabyte. (see [below for nested schema](#nestedatt--storage_classes)) ### Nested Schema for `storage_classes` diff --git a/docs/data-sources/sqlserverflexalpha_instance.md b/docs/data-sources/sqlserverflexalpha_instance.md index f209c424..b05d7b8e 100644 --- a/docs/data-sources/sqlserverflexalpha_instance.md +++ b/docs/data-sources/sqlserverflexalpha_instance.md @@ -34,7 +34,6 @@ data "stackitprivatepreview_sqlserverflexalpha_instance" "example" { - `edition` (String) Edition of the MSSQL server instance - `encryption` (Attributes) this defines which key to use for storage encryption (see [below for nested schema](#nestedatt--encryption)) - `flavor_id` (String) The id of the instance flavor. -- `id` (String) Terraform's internal resource ID. It is structured as \"`project_id`,`region`,`instance_id`\". - `is_deletable` (Boolean) Whether the instance can be deleted or not. - `name` (String) The name of the instance. - `network` (Attributes) The access configuration of the instance (see [below for nested schema](#nestedatt--network)) diff --git a/docs/data-sources/sqlserverflexalpha_user.md b/docs/data-sources/sqlserverflexalpha_user.md index b0b15341..63526135 100644 --- a/docs/data-sources/sqlserverflexalpha_user.md +++ b/docs/data-sources/sqlserverflexalpha_user.md @@ -3,12 +3,12 @@ page_title: "stackitprivatepreview_sqlserverflexalpha_user Data Source - stackitprivatepreview" subcategory: "" description: |- - SQLServer Flex user data source schema. Must have a region specified in the provider configuration. + --- # stackitprivatepreview_sqlserverflexalpha_user (Data Source) -SQLServer Flex user data source schema. Must have a `region` specified in the provider configuration. + ## Example Usage @@ -25,20 +25,38 @@ data "stackitprivatepreview_sqlserverflexalpha_user" "example" { ### Required -- `instance_id` (String) ID of the SQLServer Flex instance. -- `project_id` (String) STACKIT project ID to which the instance is associated. -- `user_id` (Number) User ID. +- `instance_id` (String) The ID of the instance. +- `project_id` (String) The STACKIT project ID. +- `region` (String) The region which should be addressed ### Optional -- `region` (String) The resource region. If not defined, the provider region is used. +- `page` (Number) Number of the page of items list to be returned. +- `size` (Number) Number of items to be returned on each page. +- `sort` (String) Sorting of the users to be returned on each page. ### Read-Only -- `default_database` (String) -- `host` (String) -- `id` (String) Terraform's internal data source. ID. It is structured as "`project_id`,`region`,`instance_id`,`user_id`". -- `port` (Number) -- `roles` (Set of String) Database access levels for the user. -- `status` (String) -- `username` (String) Username of the SQLServer Flex instance. +- `pagination` (Attributes) (see [below for nested schema](#nestedatt--pagination)) +- `users` (Attributes List) List of all users inside an instance (see [below for nested schema](#nestedatt--users)) + + +### Nested Schema for `pagination` + +Read-Only: + +- `page` (Number) +- `size` (Number) +- `sort` (String) +- `total_pages` (Number) +- `total_rows` (Number) + + + +### Nested Schema for `users` + +Read-Only: + +- `status` (String) The current status of the user. +- `tf_original_api_id` (Number) The ID of the user. +- `username` (String) The name of the user. diff --git a/docs/resources/postgresflexalpha_user.md b/docs/resources/postgresflexalpha_user.md index d68b920c..d0b9c500 100644 --- a/docs/resources/postgresflexalpha_user.md +++ b/docs/resources/postgresflexalpha_user.md @@ -44,7 +44,6 @@ import { ### Read-Only -- `connection_string` (String) The connection string for the user to the instance. - `id` (Number) The ID of the user. - `password` (String) The password for the user. - `status` (String) The current status of the user. diff --git a/sample/postgres/postresql.tf b/sample/postgres/postresql.tf index fa2f49e8..531b17e2 100644 --- a/sample/postgres/postresql.tf +++ b/sample/postgres/postresql.tf @@ -65,15 +65,15 @@ resource "stackitprivatepreview_postgresflexalpha_instance" "msh-sna-pe-example2 resource "stackitprivatepreview_postgresflexalpha_user" "ptlsdbadminuser" { project_id = var.project_id instance_id = stackitprivatepreview_postgresflexalpha_instance.msh-sna-pe-example.instance_id - username = var.db_admin_username - roles = ["createdb", "login"] + name = var.db_admin_username + roles = ["createdb", "login", "login"] # roles = ["createdb", "login", "createrole"] } resource "stackitprivatepreview_postgresflexalpha_user" "ptlsdbadminuser2" { project_id = var.project_id instance_id = stackitprivatepreview_postgresflexalpha_instance.msh-sna-pe-example2.instance_id - username = var.db_admin_username + name = var.db_admin_username roles = ["createdb", "login"] # roles = ["createdb", "login", "createrole"] } @@ -81,7 +81,7 @@ resource "stackitprivatepreview_postgresflexalpha_user" "ptlsdbadminuser2" { resource "stackitprivatepreview_postgresflexalpha_user" "ptlsdbuser" { project_id = var.project_id instance_id = stackitprivatepreview_postgresflexalpha_instance.msh-sna-pe-example.instance_id - username = var.db_username + name = var.db_name roles = ["login"] # roles = ["createdb", "login", "createrole"] } diff --git a/stackit/internal/services/postgresflexalpha/user/resource.go b/stackit/internal/services/postgresflexalpha/user/resource.go index 4adbaff0..cf9fd389 100644 --- a/stackit/internal/services/postgresflexalpha/user/resource.go +++ b/stackit/internal/services/postgresflexalpha/user/resource.go @@ -5,6 +5,7 @@ import ( _ "embed" "fmt" "math" + "slices" "strconv" "strings" "time" @@ -29,11 +30,12 @@ import ( var ( // Ensure the implementation satisfies the expected interfaces. - _ resource.Resource = &userResource{} - _ resource.ResourceWithConfigure = &userResource{} - _ resource.ResourceWithImportState = &userResource{} - _ resource.ResourceWithModifyPlan = &userResource{} - _ resource.ResourceWithIdentity = &userResource{} + _ resource.Resource = &userResource{} + _ resource.ResourceWithConfigure = &userResource{} + _ resource.ResourceWithImportState = &userResource{} + _ resource.ResourceWithModifyPlan = &userResource{} + _ resource.ResourceWithIdentity = &userResource{} + _ resource.ResourceWithValidateConfig = &userResource{} // Error message constants extractErrorSummary = "extracting failed" @@ -138,6 +140,39 @@ func (r *userResource) Schema(ctx context.Context, _ resource.SchemaRequest, res resp.Schema = s } +func (r *userResource) ValidateConfig( + ctx context.Context, + req resource.ValidateConfigRequest, + resp *resource.ValidateConfigResponse, +) { + var data resourceModel + + resp.Diagnostics.Append(req.Config.Get(ctx, &data)...) + if resp.Diagnostics.HasError() { + return + } + + var roles []string + diags := data.Roles.ElementsAs(ctx, &roles, false) + resp.Diagnostics.Append(diags...) + if diags.HasError() { + return + } + + var resRoles []string + for _, role := range roles { + if slices.Contains(resRoles, role) { + resp.Diagnostics.AddAttributeError( + path.Root("roles"), + "Attribute Configuration Error", + "defined roles MUST NOT contain duplicates", + ) + return + } + resRoles = append(resRoles, role) + } +} + // Create creates the resource and sets the initial Terraform state. func (r *userResource) Create( ctx context.Context, @@ -217,7 +252,7 @@ func (r *userResource) Create( model.UserId = types.Int64Value(id) model.Password = types.StringValue(userResp.GetPassword()) model.Status = types.StringValue(userResp.GetStatus()) - model.ConnectionString = types.StringValue(userResp.GetConnectionString()) + //model.ConnectionString = types.StringValue(userResp.GetConnectionString()) waitResp, err := postgresflexalphaWait.GetUserByIdWaitHandler( ctx, @@ -712,5 +747,6 @@ func (r *userResource) expandRoles(ctx context.Context, rolesSet types.List, dia } var roles []string diags.Append(rolesSet.ElementsAs(ctx, &roles, false)...) + slices.Sort(roles) return roles } diff --git a/stackit/internal/services/postgresflexalpha/user/resources_gen/user_resource_gen.go b/stackit/internal/services/postgresflexalpha/user/resources_gen/user_resource_gen.go index f07ab701..f96d8d93 100644 --- a/stackit/internal/services/postgresflexalpha/user/resources_gen/user_resource_gen.go +++ b/stackit/internal/services/postgresflexalpha/user/resources_gen/user_resource_gen.go @@ -14,11 +14,6 @@ import ( func UserResourceSchema(ctx context.Context) schema.Schema { return schema.Schema{ Attributes: map[string]schema.Attribute{ - "connection_string": schema.StringAttribute{ - Computed: true, - Description: "The connection string for the user to the instance.", - MarkdownDescription: "The connection string for the user to the instance.", - }, "id": schema.Int64Attribute{ Computed: true, Description: "The ID of the user.", @@ -80,14 +75,13 @@ func UserResourceSchema(ctx context.Context) schema.Schema { } type UserModel struct { - ConnectionString types.String `tfsdk:"connection_string"` - Id types.Int64 `tfsdk:"id"` - InstanceId types.String `tfsdk:"instance_id"` - Name types.String `tfsdk:"name"` - Password types.String `tfsdk:"password"` - ProjectId types.String `tfsdk:"project_id"` - Region types.String `tfsdk:"region"` - Roles types.List `tfsdk:"roles"` - Status types.String `tfsdk:"status"` - UserId types.Int64 `tfsdk:"user_id"` + Id types.Int64 `tfsdk:"id"` + InstanceId types.String `tfsdk:"instance_id"` + Name types.String `tfsdk:"name"` + Password types.String `tfsdk:"password"` + ProjectId types.String `tfsdk:"project_id"` + Region types.String `tfsdk:"region"` + Roles types.List `tfsdk:"roles"` + Status types.String `tfsdk:"status"` + UserId types.Int64 `tfsdk:"user_id"` } diff --git a/stackit/internal/services/sqlserverflexalpha/user/mapper.go b/stackit/internal/services/sqlserverflexalpha/user/mapper.go index 515c9ddc..8e522d59 100644 --- a/stackit/internal/services/sqlserverflexalpha/user/mapper.go +++ b/stackit/internal/services/sqlserverflexalpha/user/mapper.go @@ -2,6 +2,7 @@ package sqlserverflexalpha import ( "fmt" + "slices" "strconv" "github.com/hashicorp/terraform-plugin-framework/attr" @@ -44,8 +45,11 @@ func mapDataSourceFields(userResp *sqlserverflexalpha.GetUserResponse, model *da if user.Roles == nil { model.Roles = types.List(types.SetNull(types.StringType)) } else { + resRoles := *user.Roles + slices.Sort(resRoles) + var roles []attr.Value - for _, role := range *user.Roles { + for _, role := range resRoles { roles = append(roles, types.StringValue(string(role))) } rolesSet, diags := types.SetValue(types.StringType, roles) @@ -92,8 +96,11 @@ func mapFields(userResp *sqlserverflexalpha.GetUserResponse, model *resourceMode // Map roles if user.Roles != nil { + resRoles := *user.Roles + slices.Sort(resRoles) + var roles []attr.Value - for _, role := range *user.Roles { + for _, role := range resRoles { roles = append(roles, types.StringValue(string(role))) } rolesSet, diags := types.SetValue(types.StringType, roles) @@ -139,8 +146,11 @@ func mapFieldsCreate(userResp *sqlserverflexalpha.CreateUserResponse, model *res model.Password = types.StringValue(*user.Password) if user.Roles != nil { + resRoles := *user.Roles + slices.Sort(resRoles) + var roles []attr.Value - for _, role := range *user.Roles { + for _, role := range resRoles { roles = append(roles, types.StringValue(string(role))) } rolesSet, diags := types.SetValue(types.StringType, roles) diff --git a/stackit/internal/services/sqlserverflexalpha/user/resource.go b/stackit/internal/services/sqlserverflexalpha/user/resource.go index 2b98c94c..8a0c1df7 100644 --- a/stackit/internal/services/sqlserverflexalpha/user/resource.go +++ b/stackit/internal/services/sqlserverflexalpha/user/resource.go @@ -6,6 +6,7 @@ import ( "errors" "fmt" "net/http" + "slices" "strconv" "strings" "time" @@ -30,11 +31,12 @@ import ( ) var ( - _ resource.Resource = &userResource{} - _ resource.ResourceWithConfigure = &userResource{} - _ resource.ResourceWithImportState = &userResource{} - _ resource.ResourceWithModifyPlan = &userResource{} - _ resource.ResourceWithIdentity = &userResource{} + _ resource.Resource = &userResource{} + _ resource.ResourceWithConfigure = &userResource{} + _ resource.ResourceWithImportState = &userResource{} + _ resource.ResourceWithModifyPlan = &userResource{} + _ resource.ResourceWithIdentity = &userResource{} + _ resource.ResourceWithValidateConfig = &userResource{} ) func NewUserResource() resource.Resource { @@ -156,6 +158,39 @@ func (r *userResource) IdentitySchema( } } +func (r *userResource) ValidateConfig( + ctx context.Context, + req resource.ValidateConfigRequest, + resp *resource.ValidateConfigResponse, +) { + var data resourceModel + + resp.Diagnostics.Append(req.Config.Get(ctx, &data)...) + if resp.Diagnostics.HasError() { + return + } + + var roles []string + diags := data.Roles.ElementsAs(ctx, &roles, false) + resp.Diagnostics.Append(diags...) + if diags.HasError() { + return + } + + var resRoles []string + for _, role := range roles { + if slices.Contains(resRoles, role) { + resp.Diagnostics.AddAttributeError( + path.Root("roles"), + "Attribute Configuration Error", + "defined roles MUST NOT contain duplicates", + ) + return + } + resRoles = append(resRoles, role) + } +} + // Create creates the resource and sets the initial Terraform state. func (r *userResource) Create( ctx context.Context, @@ -186,6 +221,8 @@ func (r *userResource) Create( if resp.Diagnostics.HasError() { return } + + slices.Sort(roles) } // Generate API request body from model diff --git a/stackit/internal/services/sqlserverflexbeta/user/mapper.go b/stackit/internal/services/sqlserverflexbeta/user/mapper.go index 9115906f..3674ac0b 100644 --- a/stackit/internal/services/sqlserverflexbeta/user/mapper.go +++ b/stackit/internal/services/sqlserverflexbeta/user/mapper.go @@ -2,6 +2,7 @@ package sqlserverflexbeta import ( "fmt" + "slices" "strconv" "github.com/hashicorp/terraform-plugin-framework/attr" @@ -92,10 +93,14 @@ func mapFields(userResp *sqlserverflexbeta.GetUserResponse, model *resourceModel // Map roles if user.Roles != nil { + resRoles := *user.Roles + slices.Sort(resRoles) + var roles []attr.Value - for _, role := range *user.Roles { + for _, role := range resRoles { roles = append(roles, types.StringValue(string(role))) } + rolesSet, diags := types.SetValue(types.StringType, roles) if diags.HasError() { return fmt.Errorf("failed to map roles: %w", core.DiagsToError(diags)) @@ -139,8 +144,11 @@ func mapFieldsCreate(userResp *sqlserverflexbeta.CreateUserResponse, model *reso model.Password = types.StringValue(*user.Password) if user.Roles != nil { + resRoles := *user.Roles + slices.Sort(resRoles) + var roles []attr.Value - for _, role := range *user.Roles { + for _, role := range resRoles { roles = append(roles, types.StringValue(string(role))) } rolesSet, diags := types.SetValue(types.StringType, roles) diff --git a/stackit/internal/services/sqlserverflexbeta/user/resource.go b/stackit/internal/services/sqlserverflexbeta/user/resource.go index 9508c743..8f19eb61 100644 --- a/stackit/internal/services/sqlserverflexbeta/user/resource.go +++ b/stackit/internal/services/sqlserverflexbeta/user/resource.go @@ -6,6 +6,7 @@ import ( "errors" "fmt" "net/http" + "slices" "strconv" "strings" "time" @@ -30,11 +31,12 @@ import ( ) var ( - _ resource.Resource = &userResource{} - _ resource.ResourceWithConfigure = &userResource{} - _ resource.ResourceWithImportState = &userResource{} - _ resource.ResourceWithModifyPlan = &userResource{} - _ resource.ResourceWithIdentity = &userResource{} + _ resource.Resource = &userResource{} + _ resource.ResourceWithConfigure = &userResource{} + _ resource.ResourceWithImportState = &userResource{} + _ resource.ResourceWithModifyPlan = &userResource{} + _ resource.ResourceWithIdentity = &userResource{} + _ resource.ResourceWithValidateConfig = &userResource{} ) func NewUserResource() resource.Resource { @@ -156,6 +158,39 @@ func (r *userResource) IdentitySchema( } } +func (r *userResource) ValidateConfig( + ctx context.Context, + req resource.ValidateConfigRequest, + resp *resource.ValidateConfigResponse, +) { + var data resourceModel + + resp.Diagnostics.Append(req.Config.Get(ctx, &data)...) + if resp.Diagnostics.HasError() { + return + } + + var roles []string + diags := data.Roles.ElementsAs(ctx, &roles, false) + resp.Diagnostics.Append(diags...) + if diags.HasError() { + return + } + + var resRoles []string + for _, role := range roles { + if slices.Contains(resRoles, role) { + resp.Diagnostics.AddAttributeError( + path.Root("roles"), + "Attribute Configuration Error", + "defined roles MUST NOT contain duplicates", + ) + return + } + resRoles = append(resRoles, role) + } +} + // Create creates the resource and sets the initial Terraform state. func (r *userResource) Create( ctx context.Context, @@ -186,6 +221,7 @@ func (r *userResource) Create( if resp.Diagnostics.HasError() { return } + slices.Sort(roles) } // Generate API request body from model @@ -379,7 +415,7 @@ func (r *userResource) Update( resp *resource.UpdateResponse, ) { // nolint:gocritic // function signature required by Terraform // Update shouldn't be called - core.LogAndAddError(ctx, &resp.Diagnostics, "Error updating user", "User can't be updated") + core.LogAndAddError(ctx, &resp.Diagnostics, "Error updating user", "an SQL server user can not be updated, only created") } // Delete deletes the resource and removes the Terraform state on success. From 20e9b3ca4c091bf9b3544a87c646cdeea0017e4a Mon Sep 17 00:00:00 2001 From: "Marcel S. Henselin" Date: Mon, 16 Feb 2026 09:20:36 +0000 Subject: [PATCH 30/41] fix: #66 non generic api error handling (#67) ## Description relates to #1234 ## Checklist - [ ] Issue was linked above - [ ] Code format was applied: `make fmt` - [ ] Examples were added / adjusted (see `examples/` directory) - [x] Docs are up-to-date: `make generate-docs` (will be checked by CI) - [ ] Unit tests got implemented or updated - [ ] Acceptance tests got implemented or updated (see e.g. [here](https://github.com/stackitcloud/terraform-provider-stackit/blob/f5f99d170996b208672ae684b6da53420e369563/stackit/internal/services/dns/dns_acc_test.go)) - [x] Unit tests are passing: `make test` (will be checked by CI) - [x] No linter issues: `make lint` (will be checked by CI) Reviewed-on: https://tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pulls/67 Co-authored-by: Marcel S. Henselin Co-committed-by: Marcel S. Henselin --- stackit/internal/wait/sqlserverflexbeta/wait.go | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/stackit/internal/wait/sqlserverflexbeta/wait.go b/stackit/internal/wait/sqlserverflexbeta/wait.go index 60b1d74b..d898b47a 100644 --- a/stackit/internal/wait/sqlserverflexbeta/wait.go +++ b/stackit/internal/wait/sqlserverflexbeta/wait.go @@ -85,7 +85,17 @@ func CreateInstanceWaitHandler( func() (waitFinished bool, response *sqlserverflex.GetInstanceResponse, err error) { s, err := a.GetInstanceRequestExecute(ctx, projectId, region, instanceId) if err != nil { - return false, nil, err + var oapiErr *oapierror.GenericOpenAPIError + ok := errors.As(err, &oapiErr) + if !ok { + return false, nil, fmt.Errorf("could not convert error to oapierror.GenericOpenAPIError: %w", err) + } + switch oapiErr.StatusCode { + case http.StatusNotFound: + return false, nil, nil + default: + return false, nil, fmt.Errorf("api error: %w", err) + } } if s == nil || s.Id == nil || *s.Id != instanceId || s.Status == nil { return false, nil, nil From d5644ec27f4b0db7ce08fe348cf7d56b44e48bb4 Mon Sep 17 00:00:00 2001 From: Andre Harms Date: Mon, 16 Feb 2026 09:35:21 +0000 Subject: [PATCH 31/41] chore: #64 add system hardening with retry logic for client (#68) - implement RetryRoundTripper Refs: #64 Reviewed-on: https://tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pulls/68 Reviewed-by: Marcel_Henselin Co-authored-by: Andre Harms Co-committed-by: Andre Harms --- stackit/internal/core/retry_round_tripper.go | 239 +++++++++++++++++ .../internal/core/retry_round_tripper_test.go | 252 ++++++++++++++++++ stackit/provider.go | 22 +- 3 files changed, 512 insertions(+), 1 deletion(-) create mode 100644 stackit/internal/core/retry_round_tripper.go create mode 100644 stackit/internal/core/retry_round_tripper_test.go diff --git a/stackit/internal/core/retry_round_tripper.go b/stackit/internal/core/retry_round_tripper.go new file mode 100644 index 00000000..945a3061 --- /dev/null +++ b/stackit/internal/core/retry_round_tripper.go @@ -0,0 +1,239 @@ +package core + +import ( + "context" + "crypto/rand" + "errors" + "fmt" + "math/big" + "net/http" + "time" + + "github.com/hashicorp/terraform-plugin-log/tflog" +) + +const ( + // backoffMultiplier is the factor by which the delay is multiplied for exponential backoff. + backoffMultiplier = 2 + // jitterFactor is the divisor used to calculate jitter (e.g., half of the base delay). + jitterFactor = 2 +) + +var ( + // ErrRequestFailedAfterRetries is returned when a request fails after all retry attempts. + ErrRequestFailedAfterRetries = errors.New("request failed after all retry attempts") +) + +// RetryRoundTripper implements an http.RoundTripper that adds automatic retry logic for failed requests. +type RetryRoundTripper struct { + next http.RoundTripper + maxRetries int + initialDelay time.Duration + maxDelay time.Duration + perTryTimeout time.Duration +} + +// NewRetryRoundTripper creates a new instance of the RetryRoundTripper with the specified configuration. +func NewRetryRoundTripper( + next http.RoundTripper, + maxRetries int, + initialDelay, maxDelay, perTryTimeout time.Duration, +) *RetryRoundTripper { + return &RetryRoundTripper{ + next: next, + maxRetries: maxRetries, + initialDelay: initialDelay, + maxDelay: maxDelay, + perTryTimeout: perTryTimeout, + } +} + +// RoundTrip executes the request and retries on failure. +func (rrt *RetryRoundTripper) RoundTrip(req *http.Request) (*http.Response, error) { + resp, err := rrt.executeRequest(req) + if !rrt.shouldRetry(resp, err) { + if err != nil { + return resp, fmt.Errorf("initial request failed, not retrying: %w", err) + } + + return resp, nil + } + + return rrt.retryLoop(req, resp, err) +} + +// executeRequest performs a single HTTP request with a per-try timeout. +func (rrt *RetryRoundTripper) executeRequest(req *http.Request) (*http.Response, error) { + ctx, cancel := context.WithTimeout(req.Context(), rrt.perTryTimeout) + defer cancel() + + resp, err := rrt.next.RoundTrip(req.WithContext(ctx)) + if err != nil { + if errors.Is(err, context.DeadlineExceeded) { + return resp, fmt.Errorf("per-try timeout of %v exceeded: %w", rrt.perTryTimeout, err) + } + + return resp, fmt.Errorf("http roundtrip failed: %w", err) + } + + return resp, nil +} + +// retryLoop handles the retry logic for a failed request. +func (rrt *RetryRoundTripper) retryLoop( + req *http.Request, + initialResp *http.Response, + initialErr error, +) (*http.Response, error) { + var ( + lastErr = initialErr + resp = initialResp + currentDelay = rrt.initialDelay + ) + + ctx := req.Context() + + for attempt := 1; attempt <= rrt.maxRetries; attempt++ { + rrt.logRetryAttempt(ctx, attempt, currentDelay, lastErr) + + waitDuration := rrt.calculateWaitDurationWithJitter(ctx, currentDelay) + if err := rrt.waitForDelay(ctx, waitDuration); err != nil { + return nil, err // Context was cancelled during wait. + } + + // Exponential backoff for the next potential retry. + currentDelay = rrt.updateCurrentDelay(currentDelay) + + // Retry attempt. + resp, lastErr = rrt.executeRequest(req) + if !rrt.shouldRetry(resp, lastErr) { + if lastErr != nil { + return resp, fmt.Errorf("request failed on retry attempt %d: %w", attempt, lastErr) + } + + return resp, nil + } + } + + return nil, rrt.handleFinalError(ctx, resp, lastErr) +} + +// logRetryAttempt logs the details of a retry attempt. +func (rrt *RetryRoundTripper) logRetryAttempt( + ctx context.Context, + attempt int, + delay time.Duration, + err error, +) { + tflog.Info( + ctx, "Request failed, retrying...", map[string]interface{}{ + "attempt": attempt, + "max_attempts": rrt.maxRetries, + "delay": delay, + "error": err, + }, + ) +} + +// updateCurrentDelay calculates the next delay for exponential backoff. +func (rrt *RetryRoundTripper) updateCurrentDelay(currentDelay time.Duration) time.Duration { + currentDelay *= backoffMultiplier + if currentDelay > rrt.maxDelay { + return rrt.maxDelay + } + + return currentDelay +} + +// handleFinalError constructs and returns the final error after all retries have been exhausted. +func (rrt *RetryRoundTripper) handleFinalError( + ctx context.Context, + resp *http.Response, + lastErr error, +) error { + if resp != nil { + if err := resp.Body.Close(); err != nil { + + tflog.Warn( + ctx, "Failed to close response body", map[string]interface{}{ + "error": err.Error(), + }, + ) + } + } + + if lastErr != nil { + return fmt.Errorf("%w: %w", ErrRequestFailedAfterRetries, lastErr) + } + + // This case occurs if shouldRetry was true due to a retryable status code, + // but all retries failed with similar status codes. + if resp != nil { + return fmt.Errorf( + "%w: last retry attempt failed with status code %d", + ErrRequestFailedAfterRetries, + resp.StatusCode, + ) + } + + return fmt.Errorf("%w: no response received", ErrRequestFailedAfterRetries) +} + +// shouldRetry determines if a request should be retried based on the response or an error. +func (rrt *RetryRoundTripper) shouldRetry(resp *http.Response, err error) bool { + if err != nil { + return true + } + + if resp != nil { + if resp.StatusCode == http.StatusBadGateway || + resp.StatusCode == http.StatusServiceUnavailable || + resp.StatusCode == http.StatusGatewayTimeout { + return true + } + } + + return false + +} + +// calculateWaitDurationWithJitter calculates the backoff duration for the next retry, +// adding a random jitter to prevent thundering herd issues. +func (rrt *RetryRoundTripper) calculateWaitDurationWithJitter( + ctx context.Context, + baseDelay time.Duration, +) time.Duration { + if baseDelay <= 0 { + return 0 + } + + maxJitter := int64(baseDelay / jitterFactor) + if maxJitter <= 0 { + return baseDelay + } + + random, err := rand.Int(rand.Reader, big.NewInt(maxJitter)) + if err != nil { + tflog.Warn( + ctx, "Failed to generate random jitter, proceeding without it.", map[string]interface{}{ + "error": err.Error(), + }, + ) + + return baseDelay + } + + jitter := time.Duration(random.Int64()) + + return baseDelay + jitter +} + +// waitForDelay pauses execution for a given duration or until the context is canceled. +func (rrt *RetryRoundTripper) waitForDelay(ctx context.Context, delay time.Duration) error { + select { + case <-ctx.Done(): + return fmt.Errorf("context cancelled during backoff wait: %w", ctx.Err()) + case <-time.After(delay): + return nil + } +} diff --git a/stackit/internal/core/retry_round_tripper_test.go b/stackit/internal/core/retry_round_tripper_test.go new file mode 100644 index 00000000..e19c553c --- /dev/null +++ b/stackit/internal/core/retry_round_tripper_test.go @@ -0,0 +1,252 @@ +package core + +import ( + "context" + "errors" + "io" + "net/http" + "net/http/httptest" + "strings" + "sync/atomic" + "testing" + "time" +) + +type mockRoundTripper struct { + roundTripFunc func(req *http.Request) (*http.Response, error) + callCount int32 +} + +func (m *mockRoundTripper) RoundTrip(req *http.Request) (*http.Response, error) { + atomic.AddInt32(&m.callCount, 1) + + return m.roundTripFunc(req) +} + +func (m *mockRoundTripper) CallCount() int32 { + return atomic.LoadInt32(&m.callCount) +} + +func TestRetryRoundTripper_RoundTrip(t *testing.T) { + t.Parallel() + + testRetryConfig := func(next http.RoundTripper) *RetryRoundTripper { + return NewRetryRoundTripper( + next, + 3, + 1*time.Millisecond, + 10*time.Millisecond, + 50*time.Millisecond, + ) + } + + noRetryTests := []struct { + name string + mockStatusCode int + expectedStatusCode int + }{ + { + name: "should succeed on the first try", + mockStatusCode: http.StatusOK, + expectedStatusCode: http.StatusOK, + }, + { + name: "should not retry on a non-retryable status code like 400", + mockStatusCode: http.StatusBadRequest, + expectedStatusCode: http.StatusBadRequest, + }, + } + + for _, testCase := range noRetryTests { + t.Run( + testCase.name, func(t *testing.T) { + t.Parallel() + + mock := &mockRoundTripper{ + roundTripFunc: func(req *http.Request) (*http.Response, error) { + return &http.Response{ + StatusCode: testCase.mockStatusCode, + Body: io.NopCloser(nil), + Request: req, + }, nil + }, + } + tripper := testRetryConfig(mock) + req := httptest.NewRequest(http.MethodGet, "/", nil) + + resp, err := tripper.RoundTrip(req) + if resp != nil { + defer func() { + if closeErr := resp.Body.Close(); closeErr != nil { + t.Errorf("failed to close response body: %v", closeErr) + } + }() + } + + if err != nil { + t.Fatalf("expected no error, got %v", err) + } + if resp.StatusCode != testCase.expectedStatusCode { + t.Fatalf("expected status code %d, got %d", testCase.expectedStatusCode, resp.StatusCode) + } + if mock.CallCount() != 1 { + t.Fatalf("expected 1 call, got %d", mock.CallCount()) + } + }, + ) + } + + t.Run( + "should retry on retryable status code (503) and eventually fail", func(t *testing.T) { + t.Parallel() + + mock := &mockRoundTripper{ + roundTripFunc: func(req *http.Request) (*http.Response, error) { + return &http.Response{ + StatusCode: http.StatusServiceUnavailable, + Body: io.NopCloser(nil), + Request: req, + }, nil + }, + } + tripper := testRetryConfig(mock) + req := httptest.NewRequest(http.MethodGet, "/", nil) + + resp, err := tripper.RoundTrip(req) + if resp != nil { + defer func() { + if closeErr := resp.Body.Close(); closeErr != nil { + t.Errorf("failed to close response body: %v", closeErr) + } + }() + } + + if err == nil { + t.Fatal("expected an error, but got nil") + } + expectedErrorMsg := "last retry attempt failed with status code 503" + if !strings.Contains(err.Error(), expectedErrorMsg) { + t.Fatalf("expected error to contain %q, got %q", expectedErrorMsg, err.Error()) + } + if mock.CallCount() != 4 { // 1 initial + 3 retries + t.Fatalf("expected 4 calls, got %d", mock.CallCount()) + } + }, + ) + + t.Run( + "should succeed after one retry", func(t *testing.T) { + t.Parallel() + + mock := &mockRoundTripper{} + mock.roundTripFunc = func(req *http.Request) (*http.Response, error) { + if mock.CallCount() < 2 { + return &http.Response{ + StatusCode: http.StatusServiceUnavailable, + Body: io.NopCloser(nil), + Request: req, + }, nil + } + + return &http.Response{ + StatusCode: http.StatusOK, + Body: io.NopCloser(nil), + Request: req, + }, nil + } + tripper := testRetryConfig(mock) + req := httptest.NewRequest(http.MethodGet, "/", nil) + + resp, err := tripper.RoundTrip(req) + if resp != nil { + defer func() { + if closeErr := resp.Body.Close(); closeErr != nil { + t.Errorf("failed to close response body: %v", closeErr) + } + }() + } + + if err != nil { + t.Fatalf("expected no error, got %v", err) + } + if resp.StatusCode != http.StatusOK { + t.Fatalf("expected status code %d, got %d", http.StatusOK, resp.StatusCode) + } + if mock.CallCount() != 2 { + t.Fatalf("expected 2 calls, got %d", mock.CallCount()) + } + }, + ) + + t.Run( + "should retry on network error", func(t *testing.T) { + t.Parallel() + + mockErr := errors.New("simulated network error") + + mock := &mockRoundTripper{ + roundTripFunc: func(req *http.Request) (*http.Response, error) { + return nil, mockErr + }, + } + tripper := testRetryConfig(mock) + req := httptest.NewRequest(http.MethodGet, "/", nil) + + resp, err := tripper.RoundTrip(req) + if resp != nil { + defer func() { + if closeErr := resp.Body.Close(); closeErr != nil { + t.Errorf("failed to close response body: %v", closeErr) + } + }() + } + + if !errors.Is(err, mockErr) { + t.Fatalf("expected error to be %v, got %v", mockErr, err) + } + if mock.CallCount() != 4 { // 1 initial + 3 retries + t.Fatalf("expected 4 calls, got %d", mock.CallCount()) + } + }, + ) + + t.Run( + "should abort retries if the main context is cancelled", func(t *testing.T) { + t.Parallel() + + mock := &mockRoundTripper{ + roundTripFunc: func(req *http.Request) (*http.Response, error) { + select { + case <-time.After(100 * time.Millisecond): + return nil, errors.New("this should not be returned") + case <-req.Context().Done(): + return nil, req.Context().Err() + } + }, + } + tripper := testRetryConfig(mock) + baseCtx := context.Background() + + ctx, cancel := context.WithTimeout(baseCtx, 20*time.Millisecond) + defer cancel() + + req := httptest.NewRequest(http.MethodGet, "/", nil).WithContext(ctx) + + resp, err := tripper.RoundTrip(req) + if resp != nil { + defer func() { + if closeErr := resp.Body.Close(); closeErr != nil { + t.Errorf("failed to close response body: %v", closeErr) + } + }() + } + + if !errors.Is(err, context.DeadlineExceeded) { + t.Fatalf("expected error to be context.DeadlineExceeded, got %v", err) + } + if mock.CallCount() != 1 { + t.Fatalf("expected 1 call, got %d", mock.CallCount()) + } + }, + ) +} diff --git a/stackit/provider.go b/stackit/provider.go index b90d7375..ab3dd060 100644 --- a/stackit/provider.go +++ b/stackit/provider.go @@ -6,6 +6,7 @@ import ( "context" "fmt" "strings" + "time" "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator" "github.com/hashicorp/terraform-plugin-framework/datasource" @@ -45,6 +46,17 @@ var ( _ provider.Provider = &Provider{} ) +const ( + // maxRetries is the maximum number of retries for a failed HTTP request. + maxRetries = 3 + // initialDelay is the initial delay before the first retry attempt. + initialDelay = 2 * time.Second + // maxDelay is the maximum delay between retry attempts. + maxDelay = 90 * time.Second + // perTryTimeout is the timeout for each individual HTTP request attempt. + perTryTimeout = 30 * time.Second +) + // Provider is the provider implementation. type Provider struct { version string @@ -466,7 +478,7 @@ func (p *Provider) Configure(ctx context.Context, req provider.ConfigureRequest, providerData.Experiments = experimentValues } - roundTripper, err := sdkauth.SetupAuth(sdkConfig) + baseRoundTripper, err := sdkauth.SetupAuth(sdkConfig) if err != nil { core.LogAndAddError( ctx, @@ -477,6 +489,14 @@ func (p *Provider) Configure(ctx context.Context, req provider.ConfigureRequest, return } + roundTripper := core.NewRetryRoundTripper( + baseRoundTripper, + maxRetries, + initialDelay, + maxDelay, + perTryTimeout, + ) + // Make round tripper and custom endpoints available during DataSource and Resource // type Configure methods. providerData.RoundTripper = roundTripper From 7ee82366d7d6bf6a62d0ad7542e43e4a67b6b59d Mon Sep 17 00:00:00 2001 From: "Marcel S. Henselin" Date: Mon, 16 Feb 2026 13:40:05 +0000 Subject: [PATCH 32/41] fix: try fix errors (#71) ## Description relates to #1234 ## Checklist - [ ] Issue was linked above - [ ] Code format was applied: `make fmt` - [ ] Examples were added / adjusted (see `examples/` directory) - [x] Docs are up-to-date: `make generate-docs` (will be checked by CI) - [ ] Unit tests got implemented or updated - [ ] Acceptance tests got implemented or updated (see e.g. [here](https://github.com/stackitcloud/terraform-provider-stackit/blob/f5f99d170996b208672ae684b6da53420e369563/stackit/internal/services/dns/dns_acc_test.go)) - [x] Unit tests are passing: `make test` (will be checked by CI) - [x] No linter issues: `make lint` (will be checked by CI) Reviewed-on: https://tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pulls/71 Co-authored-by: Marcel S. Henselin Co-committed-by: Marcel S. Henselin --- .../postgresflexalpha/instance/resource.go | 2 +- .../testdata/instance_template.gompl | 2 +- .../sqlserverflexalpha/database/resource.go | 2 +- .../sqlserverflexalpha/instance/resource.go | 2 +- .../sqlserverflexalpha/user/mapper_test.go | 18 ++--- .../sqlserverflexbeta/database/resource.go | 2 +- .../sqlserverflexbeta/instance/resource.go | 9 +-- .../sqlserverflex_acc_test.go | 65 ++++++++++++++++++- .../services/sqlserverflexbeta/user/mapper.go | 19 ++++-- .../sqlserverflexbeta/user/mapper_test.go | 14 ++-- .../sqlserverflexbeta/user/planModifiers.yaml | 4 ++ .../sqlserverflexbeta/user/resource.go | 33 ++++++++++ .../internal/wait/postgresflexalpha/wait.go | 2 +- .../internal/wait/sqlserverflexbeta/wait.go | 6 +- 14 files changed, 140 insertions(+), 40 deletions(-) diff --git a/stackit/internal/services/postgresflexalpha/instance/resource.go b/stackit/internal/services/postgresflexalpha/instance/resource.go index 0b354c64..f6402220 100644 --- a/stackit/internal/services/postgresflexalpha/instance/resource.go +++ b/stackit/internal/services/postgresflexalpha/instance/resource.go @@ -593,7 +593,7 @@ func (r *instanceResource) ImportState( ctx, &resp.Diagnostics, "Error importing instance", fmt.Sprintf( - "Expected import identifier with format: [project_id],[region],[instance_id] Got: %q", + "Expected import identifier with format [project_id],[region],[instance_id] Got: %q", req.ID, ), ) diff --git a/stackit/internal/services/postgresflexalpha/testdata/instance_template.gompl b/stackit/internal/services/postgresflexalpha/testdata/instance_template.gompl index 83444f7c..260f0b57 100644 --- a/stackit/internal/services/postgresflexalpha/testdata/instance_template.gompl +++ b/stackit/internal/services/postgresflexalpha/testdata/instance_template.gompl @@ -48,7 +48,7 @@ resource "stackitprivatepreview_postgresflexalpha_database" "{{ $db.Name }}" { project_id = "{{ $db.ProjectId }}" instance_id = stackitprivatepreview_postgresflexalpha_instance.{{ $tfName }}.instance_id name = "{{ $db.Name }}" - owner = "{{ $db.Owner }}" + owner = stackitprivatepreview_postgresflexalpha_user.{{ $db.Owner }}.name } {{ end }} {{ end }} diff --git a/stackit/internal/services/sqlserverflexalpha/database/resource.go b/stackit/internal/services/sqlserverflexalpha/database/resource.go index e7887330..91761b18 100644 --- a/stackit/internal/services/sqlserverflexalpha/database/resource.go +++ b/stackit/internal/services/sqlserverflexalpha/database/resource.go @@ -548,7 +548,7 @@ func (r *databaseResource) ImportState( ctx, &resp.Diagnostics, "Error importing database", fmt.Sprintf( - "Expected import identifier with format: [project_id],[region],[instance_id],[database_name] Got: %q", + "Expected import identifier with format [project_id],[region],[instance_id],[database_name] Got: %q", req.ID, ), ) diff --git a/stackit/internal/services/sqlserverflexalpha/instance/resource.go b/stackit/internal/services/sqlserverflexalpha/instance/resource.go index 41f9027c..6adf07df 100644 --- a/stackit/internal/services/sqlserverflexalpha/instance/resource.go +++ b/stackit/internal/services/sqlserverflexalpha/instance/resource.go @@ -523,7 +523,7 @@ func (r *instanceResource) ImportState( ctx, &resp.Diagnostics, "Error importing instance", fmt.Sprintf( - "Expected import identifier with format: [project_id],[region],[instance_id] Got: %q", + "Expected import identifier with format [project_id],[region],[instance_id] Got: %q", req.ID, ), ) diff --git a/stackit/internal/services/sqlserverflexalpha/user/mapper_test.go b/stackit/internal/services/sqlserverflexalpha/user/mapper_test.go index f981eb0e..b116b8dd 100644 --- a/stackit/internal/services/sqlserverflexalpha/user/mapper_test.go +++ b/stackit/internal/services/sqlserverflexalpha/user/mapper_test.go @@ -63,9 +63,9 @@ func TestMapDataSourceFields(t *testing.T) { Roles: types.List( types.SetValueMust( types.StringType, []attr.Value{ + types.StringValue(""), types.StringValue("role_1"), types.StringValue("role_2"), - types.StringValue(""), }, ), ), @@ -138,7 +138,7 @@ func TestMapDataSourceFields(t *testing.T) { t.Fatalf("Should not have failed: %v", err) } if tt.isValid { - diff := cmp.Diff(state, &tt.expected) + diff := cmp.Diff(&tt.expected, state) if diff != "" { t.Fatalf("Data does not match: %s", diff) } @@ -183,8 +183,8 @@ func TestMapFieldsCreate(t *testing.T) { &sqlserverflexalpha.CreateUserResponse{ Id: utils.Ptr(int64(2)), Roles: &[]string{ - "role_1", "role_2", + "role_1", "", }, Username: utils.Ptr("username"), @@ -204,9 +204,9 @@ func TestMapFieldsCreate(t *testing.T) { Roles: types.List( types.SetValueMust( types.StringType, []attr.Value{ + types.StringValue(""), types.StringValue("role_1"), types.StringValue("role_2"), - types.StringValue(""), }, ), ), @@ -292,7 +292,7 @@ func TestMapFieldsCreate(t *testing.T) { t.Fatalf("Should not have failed: %v", err) } if tt.isValid { - diff := cmp.Diff(state, &tt.expected) + diff := cmp.Diff(&tt.expected, state) if diff != "" { t.Fatalf("Data does not match: %s", diff) } @@ -332,8 +332,8 @@ func TestMapFields(t *testing.T) { "simple_values", &sqlserverflexalpha.GetUserResponse{ Roles: &[]string{ - "role_1", "role_2", + "role_1", "", }, Username: utils.Ptr("username"), @@ -350,9 +350,9 @@ func TestMapFields(t *testing.T) { Roles: types.List( types.SetValueMust( types.StringType, []attr.Value{ + types.StringValue(""), types.StringValue("role_1"), types.StringValue("role_2"), - types.StringValue(""), }, ), ), @@ -423,7 +423,7 @@ func TestMapFields(t *testing.T) { t.Fatalf("Should not have failed: %v", err) } if tt.isValid { - diff := cmp.Diff(state, &tt.expected) + diff := cmp.Diff(&tt.expected, state) if diff != "" { t.Fatalf("Data does not match: %s", diff) } @@ -516,7 +516,7 @@ func TestToCreatePayload(t *testing.T) { t.Fatalf("Should not have failed: %v", err) } if tt.isValid { - diff := cmp.Diff(output, tt.expected) + diff := cmp.Diff(tt.expected, output) if diff != "" { t.Fatalf("Data does not match: %s", diff) } diff --git a/stackit/internal/services/sqlserverflexbeta/database/resource.go b/stackit/internal/services/sqlserverflexbeta/database/resource.go index 39879cae..8eb93b54 100644 --- a/stackit/internal/services/sqlserverflexbeta/database/resource.go +++ b/stackit/internal/services/sqlserverflexbeta/database/resource.go @@ -548,7 +548,7 @@ func (r *databaseResource) ImportState( ctx, &resp.Diagnostics, "Error importing database", fmt.Sprintf( - "Expected import identifier with format: [project_id],[region],[instance_id],[database_name] Got: %q", + "Expected import identifier with format [project_id],[region],[instance_id],[database_name] Got: %q", req.ID, ), ) diff --git a/stackit/internal/services/sqlserverflexbeta/instance/resource.go b/stackit/internal/services/sqlserverflexbeta/instance/resource.go index c04c1fd3..404f665b 100644 --- a/stackit/internal/services/sqlserverflexbeta/instance/resource.go +++ b/stackit/internal/services/sqlserverflexbeta/instance/resource.go @@ -293,13 +293,6 @@ func (r *instanceResource) Read(ctx context.Context, req resource.ReadRequest, r return } - // Read identity data - var identityData InstanceResourceIdentityModel - resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...) - if resp.Diagnostics.HasError() { - return - } - ctx = core.InitProviderContext(ctx) projectId := data.ProjectId.ValueString() @@ -523,7 +516,7 @@ func (r *instanceResource) ImportState( ctx, &resp.Diagnostics, "Error importing instance", fmt.Sprintf( - "Expected import identifier with format: [project_id],[region],[instance_id] Got: %q", + "Expected import identifier with format [project_id],[region],[instance_id] Got: %q", req.ID, ), ) diff --git a/stackit/internal/services/sqlserverflexbeta/sqlserverflex_acc_test.go b/stackit/internal/services/sqlserverflexbeta/sqlserverflex_acc_test.go index e3fd5473..0f0b47ea 100644 --- a/stackit/internal/services/sqlserverflexbeta/sqlserverflex_acc_test.go +++ b/stackit/internal/services/sqlserverflexbeta/sqlserverflex_acc_test.go @@ -15,7 +15,7 @@ import ( "github.com/stackitcloud/stackit-sdk-go/core/config" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/internal/testutils" - sqlserverflexbeta2 "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta" + sqlserverflexbetaResGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta" sqlserverflexbeta "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/instance" // The fwresource import alias is so there is no collision @@ -36,7 +36,7 @@ func init() { F: func(region string) error { ctx := context.Background() apiClientConfigOptions := []config.ConfigurationOption{} - apiClient, err := sqlserverflexbeta2.NewAPIClient(apiClientConfigOptions...) + apiClient, err := sqlserverflexbetaResGen.NewAPIClient(apiClientConfigOptions...) if err != nil { log.Fatalln(err) } @@ -228,6 +228,67 @@ func TestAccInstance(t *testing.T) { }) } +func TestAccInstanceReApply(t *testing.T) { + exData := getExample() + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { + testAccPreCheck(t) + t.Logf(" ... working on instance %s", exData.TfName) + testInstances = append(testInstances, exData.TfName) + }, + ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories, + Steps: []resource.TestStep{ + // Create and verify + { + Config: testutils.StringFromTemplateMust( + "testdata/instance_template.gompl", + exData, + ), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr(resName("instance", exData.TfName), "name", exData.Name), + resource.TestCheckResourceAttrSet(resName("instance", exData.TfName), "id"), + // TODO: check all fields + ), + }, + // Create and verify + { + Config: testutils.StringFromTemplateMust( + "testdata/instance_template.gompl", + exData, + ), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr(resName("instance", exData.TfName), "name", exData.Name), + resource.TestCheckResourceAttrSet(resName("instance", exData.TfName), "id"), + // TODO: check all fields + ), + }, + { + RefreshState: true, + }, + // Create and verify + { + Config: testutils.StringFromTemplateMust( + "testdata/instance_template.gompl", + exData, + ), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr(resName("instance", exData.TfName), "name", exData.Name), + resource.TestCheckResourceAttrSet(resName("instance", exData.TfName), "id"), + // TODO: check all fields + ), + }, + // Import test + { + ResourceName: resName("instance", exData.TfName), + ImportStateKind: resource.ImportBlockWithResourceIdentity, + ImportState: true, + // ImportStateVerify is not supported with plannable import blocks + // ImportStateVerify: true, + }, + }, + }) +} + func TestAccInstanceNoEncryption(t *testing.T) { data := getExample() diff --git a/stackit/internal/services/sqlserverflexbeta/user/mapper.go b/stackit/internal/services/sqlserverflexbeta/user/mapper.go index 3674ac0b..54ba2547 100644 --- a/stackit/internal/services/sqlserverflexbeta/user/mapper.go +++ b/stackit/internal/services/sqlserverflexbeta/user/mapper.go @@ -46,7 +46,9 @@ func mapDataSourceFields(userResp *sqlserverflexbeta.GetUserResponse, model *dat model.Roles = types.List(types.SetNull(types.StringType)) } else { var roles []attr.Value - for _, role := range *user.Roles { + resRoles := *user.Roles + slices.Sort(resRoles) + for _, role := range resRoles { roles = append(roles, types.StringValue(string(role))) } rolesSet, diags := types.SetValue(types.StringType, roles) @@ -183,9 +185,14 @@ func toCreatePayload( return nil, fmt.Errorf("nil model") } - return &sqlserverflexbeta.CreateUserRequestPayload{ - Username: conversion.StringValueToPointer(model.Username), - DefaultDatabase: conversion.StringValueToPointer(model.DefaultDatabase), - Roles: &roles, - }, nil + pl := sqlserverflexbeta.CreateUserRequestPayload{ + Username: conversion.StringValueToPointer(model.Username), + Roles: &roles, + } + slices.Sort(roles) + if !model.DefaultDatabase.IsNull() || !model.DefaultDatabase.IsUnknown() { + pl.DefaultDatabase = conversion.StringValueToPointer(model.DefaultDatabase) + } + + return &pl, nil } diff --git a/stackit/internal/services/sqlserverflexbeta/user/mapper_test.go b/stackit/internal/services/sqlserverflexbeta/user/mapper_test.go index b0e2bb02..c0e09bda 100644 --- a/stackit/internal/services/sqlserverflexbeta/user/mapper_test.go +++ b/stackit/internal/services/sqlserverflexbeta/user/mapper_test.go @@ -63,9 +63,9 @@ func TestMapDataSourceFields(t *testing.T) { Roles: types.List( types.SetValueMust( types.StringType, []attr.Value{ + types.StringValue(""), types.StringValue("role_1"), types.StringValue("role_2"), - types.StringValue(""), }, ), ), @@ -138,7 +138,7 @@ func TestMapDataSourceFields(t *testing.T) { t.Fatalf("Should not have failed: %v", err) } if tt.isValid { - diff := cmp.Diff(state, &tt.expected) + diff := cmp.Diff(&tt.expected, state) if diff != "" { t.Fatalf("Data does not match: %s", diff) } @@ -204,9 +204,9 @@ func TestMapFieldsCreate(t *testing.T) { Roles: types.List( types.SetValueMust( types.StringType, []attr.Value{ + types.StringValue(""), types.StringValue("role_1"), types.StringValue("role_2"), - types.StringValue(""), }, ), ), @@ -292,7 +292,7 @@ func TestMapFieldsCreate(t *testing.T) { t.Fatalf("Should not have failed: %v", err) } if tt.isValid { - diff := cmp.Diff(state, &tt.expected) + diff := cmp.Diff(&tt.expected, state) if diff != "" { t.Fatalf("Data does not match: %s", diff) } @@ -332,8 +332,8 @@ func TestMapFields(t *testing.T) { "simple_values", &sqlserverflexbeta.GetUserResponse{ Roles: &[]string{ - "role_1", "role_2", + "role_1", "", }, Username: utils.Ptr("username"), @@ -350,9 +350,9 @@ func TestMapFields(t *testing.T) { Roles: types.List( types.SetValueMust( types.StringType, []attr.Value{ + types.StringValue(""), types.StringValue("role_1"), types.StringValue("role_2"), - types.StringValue(""), }, ), ), @@ -423,7 +423,7 @@ func TestMapFields(t *testing.T) { t.Fatalf("Should not have failed: %v", err) } if tt.isValid { - diff := cmp.Diff(state, &tt.expected) + diff := cmp.Diff(&tt.expected, state) if diff != "" { t.Fatalf("Data does not match: %s", diff) } diff --git a/stackit/internal/services/sqlserverflexbeta/user/planModifiers.yaml b/stackit/internal/services/sqlserverflexbeta/user/planModifiers.yaml index 8ff346ab..43b029e8 100644 --- a/stackit/internal/services/sqlserverflexbeta/user/planModifiers.yaml +++ b/stackit/internal/services/sqlserverflexbeta/user/planModifiers.yaml @@ -2,6 +2,7 @@ fields: - name: 'id' modifiers: - 'UseStateForUnknown' + - 'RequiresReplace' - name: 'instance_id' validators: @@ -22,6 +23,7 @@ fields: - name: 'region' modifiers: - 'RequiresReplace' + - 'RequiresReplace' - name: 'user_id' modifiers: @@ -31,10 +33,12 @@ fields: - name: 'username' modifiers: - 'UseStateForUnknown' + - 'RequiresReplace' - name: 'roles' modifiers: - 'UseStateForUnknown' + - 'RequiresReplace' - name: 'password' modifiers: diff --git a/stackit/internal/services/sqlserverflexbeta/user/resource.go b/stackit/internal/services/sqlserverflexbeta/user/resource.go index 8f19eb61..9159cf01 100644 --- a/stackit/internal/services/sqlserverflexbeta/user/resource.go +++ b/stackit/internal/services/sqlserverflexbeta/user/resource.go @@ -11,6 +11,7 @@ import ( "strings" "time" + "github.com/hashicorp/terraform-plugin-framework/attr" "github.com/hashicorp/terraform-plugin-framework/path" "github.com/hashicorp/terraform-plugin-framework/resource" "github.com/hashicorp/terraform-plugin-framework/resource/identityschema" @@ -107,6 +108,38 @@ func (r *userResource) ModifyPlan( return } + // TODO: verify if this is needed - START + var configRoles []string + diags := configModel.Roles.ElementsAs(ctx, &configRoles, false) + resp.Diagnostics.Append(diags...) + if diags.HasError() { + return + } + + var planRoles []string + diags = planModel.Roles.ElementsAs(ctx, &planRoles, false) + resp.Diagnostics.Append(diags...) + if diags.HasError() { + return + } + + slices.Sort(configRoles) + slices.Sort(planRoles) + + if !slices.Equal(configRoles, planRoles) { + var roles []attr.Value + for _, role := range configRoles { + roles = append(roles, types.StringValue(string(role))) + } + rolesSet, diags := types.SetValue(types.StringType, roles) + resp.Diagnostics.Append(diags...) + if diags.HasError() { + return + } + planModel.Roles = types.List(rolesSet) + } + // TODO: verify if this is needed - END + resp.Diagnostics.Append(resp.Plan.Set(ctx, planModel)...) if resp.Diagnostics.HasError() { return diff --git a/stackit/internal/wait/postgresflexalpha/wait.go b/stackit/internal/wait/postgresflexalpha/wait.go index 5c41c4b1..57106cec 100644 --- a/stackit/internal/wait/postgresflexalpha/wait.go +++ b/stackit/internal/wait/postgresflexalpha/wait.go @@ -208,7 +208,7 @@ func PartialUpdateInstanceWaitHandler( case InstanceStateUnknown: return false, nil, nil case InstanceStateFailed: - return true, s, fmt.Errorf("update failed for instance with id %s", instanceId) + return true, s, fmt.Errorf("update got status FAILURE for instance with id %s", instanceId) } }, ) diff --git a/stackit/internal/wait/sqlserverflexbeta/wait.go b/stackit/internal/wait/sqlserverflexbeta/wait.go index d898b47a..41bfa2c1 100644 --- a/stackit/internal/wait/sqlserverflexbeta/wait.go +++ b/stackit/internal/wait/sqlserverflexbeta/wait.go @@ -161,8 +161,10 @@ func CreateInstanceWaitHandler( return false, nil, nil } return true, s, nil - case strings.ToLower(InstanceStateUnknown), strings.ToLower(InstanceStateFailed): - return true, nil, fmt.Errorf("create failed for instance with id %s", instanceId) + case strings.ToLower(InstanceStateUnknown): + return true, nil, fmt.Errorf("create failed for instance %s with status %s", instanceId, InstanceStateUnknown) + case strings.ToLower(InstanceStateFailed): + return true, nil, fmt.Errorf("create failed for instance %s with status %s", instanceId, InstanceStateFailed) case strings.ToLower(InstanceStatePending), strings.ToLower(InstanceStateProcessing): tflog.Info( ctx, "request is being handled", map[string]interface{}{ From f05e90c35a6fe44bc8460108bf341421ab088b88 Mon Sep 17 00:00:00 2001 From: "Marcel S. Henselin" Date: Mon, 16 Feb 2026 16:12:53 +0000 Subject: [PATCH 33/41] fix: some more fix tests (#72) ## Description relates to #1234 ## Checklist - [ ] Issue was linked above - [ ] Code format was applied: `make fmt` - [ ] Examples were added / adjusted (see `examples/` directory) - [x] Docs are up-to-date: `make generate-docs` (will be checked by CI) - [ ] Unit tests got implemented or updated - [ ] Acceptance tests got implemented or updated (see e.g. [here](https://github.com/stackitcloud/terraform-provider-stackit/blob/f5f99d170996b208672ae684b6da53420e369563/stackit/internal/services/dns/dns_acc_test.go)) - [x] Unit tests are passing: `make test` (will be checked by CI) - [x] No linter issues: `make lint` (will be checked by CI) Reviewed-on: https://tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pulls/72 Co-authored-by: Marcel S. Henselin Co-committed-by: Marcel S. Henselin --- .../postgresflexalpha/database/resource.go | 20 +-- .../postgresflexalpha/user/mapper_test.go | 102 +++++++------- .../sqlserverflexalpha/database/resource.go | 124 ++++-------------- .../sqlserverflexalpha/user/mapper_test.go | 18 ++- .../sqlserverflexbeta/database/mapper.go | 5 +- .../sqlserverflexbeta/database/resource.go | 27 +--- .../sqlserverflex_acc_test.go | 4 +- .../services/sqlserverflexbeta/user/mapper.go | 12 +- .../sqlserverflexbeta/user/resource.go | 51 +++---- stackit/provider.go | 17 +-- 10 files changed, 137 insertions(+), 243 deletions(-) diff --git a/stackit/internal/services/postgresflexalpha/database/resource.go b/stackit/internal/services/postgresflexalpha/database/resource.go index fd3f225c..1cf3f2df 100644 --- a/stackit/internal/services/postgresflexalpha/database/resource.go +++ b/stackit/internal/services/postgresflexalpha/database/resource.go @@ -281,24 +281,12 @@ func (r *databaseResource) Read( return } - // Read identity data - var identityData DatabaseResourceIdentityModel - resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...) - if resp.Diagnostics.HasError() { - return - } - ctx = core.InitProviderContext(ctx) - projectId, region, instanceId, databaseId, errExt := r.extractIdentityData(model, identityData) - if errExt != nil { - core.LogAndAddError( - ctx, - &resp.Diagnostics, - extractErrorSummary, - fmt.Sprintf(extractErrorMessage, errExt), - ) - } + projectId := model.ProjectId.ValueString() + instanceId := model.InstanceId.ValueString() + region := model.Region.ValueString() + databaseId := model.DatabaseId.ValueInt64() ctx = tflog.SetField(ctx, "project_id", projectId) ctx = tflog.SetField(ctx, "instance_id", instanceId) diff --git a/stackit/internal/services/postgresflexalpha/user/mapper_test.go b/stackit/internal/services/postgresflexalpha/user/mapper_test.go index 1a8c3f99..5014d4ac 100644 --- a/stackit/internal/services/postgresflexalpha/user/mapper_test.go +++ b/stackit/internal/services/postgresflexalpha/user/mapper_test.go @@ -164,16 +164,16 @@ func TestMapFieldsCreate(t *testing.T) { }, testRegion, resourceModel{ - Id: types.Int64Value(1), - UserId: types.Int64Value(1), - InstanceId: types.StringValue("iid"), - ProjectId: types.StringValue("pid"), - Name: types.StringNull(), - Roles: types.List(types.SetNull(types.StringType)), - Password: types.StringNull(), - Region: types.StringValue(testRegion), - Status: types.StringNull(), - ConnectionString: types.StringNull(), + Id: types.Int64Value(1), + UserId: types.Int64Value(1), + InstanceId: types.StringValue("iid"), + ProjectId: types.StringValue("pid"), + Name: types.StringNull(), + Roles: types.List(types.SetNull(types.StringType)), + Password: types.StringNull(), + Region: types.StringValue(testRegion), + Status: types.StringNull(), + //ConnectionString: types.StringNull(), }, true, }, @@ -186,16 +186,16 @@ func TestMapFieldsCreate(t *testing.T) { }, testRegion, resourceModel{ - Id: types.Int64Value(1), - UserId: types.Int64Value(1), - InstanceId: types.StringValue("iid"), - ProjectId: types.StringValue("pid"), - Name: types.StringValue("username"), - Roles: types.List(types.SetNull(types.StringType)), - Password: types.StringNull(), - Region: types.StringValue(testRegion), - Status: types.StringValue("status"), - ConnectionString: types.StringNull(), + Id: types.Int64Value(1), + UserId: types.Int64Value(1), + InstanceId: types.StringValue("iid"), + ProjectId: types.StringValue("pid"), + Name: types.StringValue("username"), + Roles: types.List(types.SetNull(types.StringType)), + Password: types.StringNull(), + Region: types.StringValue(testRegion), + Status: types.StringValue("status"), + //ConnectionString: types.StringNull(), }, true, }, @@ -208,16 +208,16 @@ func TestMapFieldsCreate(t *testing.T) { }, testRegion, resourceModel{ - Id: types.Int64Value(1), - UserId: types.Int64Value(1), - InstanceId: types.StringValue("iid"), - ProjectId: types.StringValue("pid"), - Name: types.StringNull(), - Roles: types.List(types.SetNull(types.StringType)), - Password: types.StringNull(), - Region: types.StringValue(testRegion), - Status: types.StringNull(), - ConnectionString: types.StringNull(), + Id: types.Int64Value(1), + UserId: types.Int64Value(1), + InstanceId: types.StringValue("iid"), + ProjectId: types.StringValue("pid"), + Name: types.StringNull(), + Roles: types.List(types.SetNull(types.StringType)), + Password: types.StringNull(), + Region: types.StringValue(testRegion), + Status: types.StringNull(), + //ConnectionString: types.StringNull(), }, true, }, @@ -285,15 +285,15 @@ func TestMapFields(t *testing.T) { }, testRegion, resourceModel{ - Id: types.Int64Value(1), - UserId: types.Int64Value(int64(1)), - InstanceId: types.StringValue("iid"), - ProjectId: types.StringValue("pid"), - Name: types.StringNull(), - Roles: types.List(types.SetNull(types.StringType)), - Region: types.StringValue(testRegion), - Status: types.StringNull(), - ConnectionString: types.StringNull(), + Id: types.Int64Value(1), + UserId: types.Int64Value(int64(1)), + InstanceId: types.StringValue("iid"), + ProjectId: types.StringValue("pid"), + Name: types.StringNull(), + Roles: types.List(types.SetNull(types.StringType)), + Region: types.StringValue(testRegion), + Status: types.StringNull(), + //ConnectionString: types.StringNull(), }, true, }, @@ -324,9 +324,9 @@ func TestMapFields(t *testing.T) { }, ), ), - Region: types.StringValue(testRegion), - Status: types.StringNull(), - ConnectionString: types.StringNull(), + Region: types.StringValue(testRegion), + Status: types.StringNull(), + //ConnectionString: types.StringNull(), }, true, }, @@ -338,15 +338,15 @@ func TestMapFields(t *testing.T) { }, testRegion, resourceModel{ - Id: types.Int64Value(1), - UserId: types.Int64Value(1), - InstanceId: types.StringValue("iid"), - ProjectId: types.StringValue("pid"), - Name: types.StringNull(), - Roles: types.List(types.SetNull(types.StringType)), - Region: types.StringValue(testRegion), - Status: types.StringNull(), - ConnectionString: types.StringNull(), + Id: types.Int64Value(1), + UserId: types.Int64Value(1), + InstanceId: types.StringValue("iid"), + ProjectId: types.StringValue("pid"), + Name: types.StringNull(), + Roles: types.List(types.SetNull(types.StringType)), + Region: types.StringValue(testRegion), + Status: types.StringNull(), + //ConnectionString: types.StringNull(), }, true, }, diff --git a/stackit/internal/services/sqlserverflexalpha/database/resource.go b/stackit/internal/services/sqlserverflexalpha/database/resource.go index 91761b18..eec284f1 100644 --- a/stackit/internal/services/sqlserverflexalpha/database/resource.go +++ b/stackit/internal/services/sqlserverflexalpha/database/resource.go @@ -186,25 +186,25 @@ func (r *databaseResource) Create(ctx context.Context, req resource.CreateReques payLoad.Name = data.Name.ValueStringPointer() payLoad.Owner = data.Owner.ValueStringPointer() - _, err := wait.WaitForUserWaitHandler( - ctx, - r.client, - projectId, - instanceId, - region, - data.Owner.ValueString(), - ). - SetSleepBeforeWait(10 * time.Second). - WaitWithContext(ctx) - if err != nil { - core.LogAndAddError( - ctx, - &resp.Diagnostics, - createErr, - fmt.Sprintf("Calling API: %v", err), - ) - return - } + //_, err := wait.WaitForUserWaitHandler( + // ctx, + // r.client, + // projectId, + // instanceId, + // region, + // data.Owner.ValueString(), + //). + // SetSleepBeforeWait(10 * time.Second). + // WaitWithContext(ctx) + //if err != nil { + // core.LogAndAddError( + // ctx, + // &resp.Diagnostics, + // createErr, + // fmt.Sprintf("Calling API: %v", err), + // ) + // return + //} createResp, err := r.client.CreateDatabaseRequest(ctx, projectId, region, instanceId). CreateDatabaseRequestPayload(payLoad). @@ -361,15 +361,10 @@ func (r *databaseResource) Read(ctx context.Context, req resource.ReadRequest, r ctx = core.InitProviderContext(ctx) - projectId, region, instanceId, databaseName, errExt := r.extractIdentityData(model, identityData) - if errExt != nil { - core.LogAndAddError( - ctx, - &resp.Diagnostics, - extractErrorSummary, - fmt.Sprintf(extractErrorMessage, errExt), - ) - } + projectId := model.ProjectId.ValueString() + region := model.Region.ValueString() + instanceId := model.InstanceId.ValueString() + databaseName := model.DatabaseName.ValueString() ctx = tflog.SetField(ctx, "project_id", projectId) ctx = tflog.SetField(ctx, "instance_id", instanceId) @@ -445,15 +440,10 @@ func (r *databaseResource) Delete(ctx context.Context, req resource.DeleteReques ctx = core.InitProviderContext(ctx) - projectId, region, instanceId, databaseName, errExt := r.extractIdentityData(model, identityData) - if errExt != nil { - core.LogAndAddError( - ctx, - &resp.Diagnostics, - extractErrorSummary, - fmt.Sprintf(extractErrorMessage, errExt), - ) - } + projectId := model.ProjectId.ValueString() + region := model.Region.ValueString() + instanceId := model.InstanceId.ValueString() + databaseName := model.DatabaseName.ValueString() ctx = tflog.SetField(ctx, "project_id", projectId) ctx = tflog.SetField(ctx, "instance_id", instanceId) @@ -508,23 +498,6 @@ func (r *databaseResource) ModifyPlan( return } - var identityModel DatabaseResourceIdentityModel - identityModel.ProjectID = planModel.ProjectId - identityModel.Region = planModel.Region - - if !planModel.InstanceId.IsNull() && !planModel.InstanceId.IsUnknown() { - identityModel.InstanceID = planModel.InstanceId - } - - if !planModel.Name.IsNull() && !planModel.Name.IsUnknown() { - identityModel.DatabaseName = planModel.Name - } - - resp.Diagnostics.Append(resp.Identity.Set(ctx, identityModel)...) - if resp.Diagnostics.HasError() { - return - } - resp.Diagnostics.Append(resp.Plan.Set(ctx, planModel)...) if resp.Diagnostics.HasError() { return @@ -594,46 +567,3 @@ func (r *databaseResource) ImportState( tflog.Info(ctx, "sqlserverflexalpha database state imported") } - -// extractIdentityData extracts essential identifiers from the resource model, falling back to the identity model. -func (r *databaseResource) extractIdentityData( - model resourceModel, - identity DatabaseResourceIdentityModel, -) (projectId, region, instanceId, databaseName string, err error) { - if !model.Name.IsNull() && !model.Name.IsUnknown() { - databaseName = model.Name.ValueString() - } else { - if identity.DatabaseName.IsNull() || identity.DatabaseName.IsUnknown() { - return "", "", "", "", fmt.Errorf("database_name not found in config") - } - databaseName = identity.DatabaseName.ValueString() - } - - if !model.ProjectId.IsNull() && !model.ProjectId.IsUnknown() { - projectId = model.ProjectId.ValueString() - } else { - if identity.ProjectID.IsNull() || identity.ProjectID.IsUnknown() { - return "", "", "", "", fmt.Errorf("project_id not found in config") - } - projectId = identity.ProjectID.ValueString() - } - - if !model.Region.IsNull() && !model.Region.IsUnknown() { - region = r.providerData.GetRegionWithOverride(model.Region) - } else { - if identity.Region.IsNull() || identity.Region.IsUnknown() { - return "", "", "", "", fmt.Errorf("region not found in config") - } - region = r.providerData.GetRegionWithOverride(identity.Region) - } - - if !model.InstanceId.IsNull() && !model.InstanceId.IsUnknown() { - instanceId = model.InstanceId.ValueString() - } else { - if identity.InstanceID.IsNull() || identity.InstanceID.IsUnknown() { - return "", "", "", "", fmt.Errorf("instance_id not found in config") - } - instanceId = identity.InstanceID.ValueString() - } - return projectId, region, instanceId, databaseName, nil -} diff --git a/stackit/internal/services/sqlserverflexalpha/user/mapper_test.go b/stackit/internal/services/sqlserverflexalpha/user/mapper_test.go index b116b8dd..4dbe7d03 100644 --- a/stackit/internal/services/sqlserverflexalpha/user/mapper_test.go +++ b/stackit/internal/services/sqlserverflexalpha/user/mapper_test.go @@ -43,9 +43,12 @@ func TestMapDataSourceFields(t *testing.T) { "simple_values", &sqlserverflexalpha.GetUserResponse{ Roles: &[]string{ - "role_1", - "role_2", - "", + "##STACKIT_SQLAgentUser##", + "##STACKIT_DatabaseManager##", + "##STACKIT_LoginManager##", + "##STACKIT_SQLAgentManager##", + "##STACKIT_ProcessManager##", + "##STACKIT_ServerManager##", }, Username: utils.Ptr("username"), Host: utils.Ptr("host"), @@ -63,9 +66,12 @@ func TestMapDataSourceFields(t *testing.T) { Roles: types.List( types.SetValueMust( types.StringType, []attr.Value{ - types.StringValue(""), - types.StringValue("role_1"), - types.StringValue("role_2"), + types.StringValue("##STACKIT_DatabaseManager##"), + types.StringValue("##STACKIT_LoginManager##"), + types.StringValue("##STACKIT_ProcessManager##"), + types.StringValue("##STACKIT_SQLAgentManager##"), + types.StringValue("##STACKIT_SQLAgentUser##"), + types.StringValue("##STACKIT_ServerManager##"), }, ), ), diff --git a/stackit/internal/services/sqlserverflexbeta/database/mapper.go b/stackit/internal/services/sqlserverflexbeta/database/mapper.go index a418dadb..43a4344f 100644 --- a/stackit/internal/services/sqlserverflexbeta/database/mapper.go +++ b/stackit/internal/services/sqlserverflexbeta/database/mapper.go @@ -2,7 +2,6 @@ package sqlserverflexbeta import ( "fmt" - "strings" "github.com/hashicorp/terraform-plugin-framework/types" @@ -34,7 +33,7 @@ func mapFields(source *sqlserverflexbeta.GetDatabaseResponse, model *dataSourceM model.Id = types.Int64Value(databaseId) model.DatabaseName = types.StringValue(source.GetName()) model.Name = types.StringValue(source.GetName()) - model.Owner = types.StringValue(strings.Trim(source.GetOwner(), "\"")) + model.Owner = types.StringValue(source.GetOwner()) model.Region = types.StringValue(region) model.ProjectId = types.StringValue(model.ProjectId.ValueString()) model.InstanceId = types.StringValue(model.InstanceId.ValueString()) @@ -75,7 +74,7 @@ func mapResourceFields(source *sqlserverflexbeta.GetDatabaseResponse, model *res model.Id = types.Int64Value(databaseId) model.DatabaseName = types.StringValue(source.GetName()) model.Name = types.StringValue(source.GetName()) - model.Owner = types.StringValue(strings.Trim(source.GetOwner(), "\"")) + model.Owner = types.StringValue(source.GetOwner()) model.Region = types.StringValue(region) model.ProjectId = types.StringValue(model.ProjectId.ValueString()) model.InstanceId = types.StringValue(model.InstanceId.ValueString()) diff --git a/stackit/internal/services/sqlserverflexbeta/database/resource.go b/stackit/internal/services/sqlserverflexbeta/database/resource.go index 8eb93b54..2f77191e 100644 --- a/stackit/internal/services/sqlserverflexbeta/database/resource.go +++ b/stackit/internal/services/sqlserverflexbeta/database/resource.go @@ -247,7 +247,6 @@ func (r *databaseResource) Create(ctx context.Context, req resource.CreateReques return } - // TODO: is this necessary to wait for the database-> API say 200 ? waitResp, err := wait.CreateDatabaseWaitHandler( ctx, r.client, @@ -310,19 +309,8 @@ func (r *databaseResource) Create(ctx context.Context, req resource.CreateReques return } - database, err := r.client.GetDatabaseRequest(ctx, projectId, region, instanceId, databaseName).Execute() - if err != nil { - core.LogAndAddError( - ctx, - &resp.Diagnostics, - "Error creating database", - fmt.Sprintf("Getting database details after creation: %v", err), - ) - return - } - // Map response body to schema - err = mapResourceFields(database, &data, region) + err = mapResourceFields(waitResp, &data, region) if err != nil { core.LogAndAddError( ctx, @@ -361,15 +349,10 @@ func (r *databaseResource) Read(ctx context.Context, req resource.ReadRequest, r ctx = core.InitProviderContext(ctx) - projectId, region, instanceId, databaseName, errExt := r.extractIdentityData(model, identityData) - if errExt != nil { - core.LogAndAddError( - ctx, - &resp.Diagnostics, - extractErrorSummary, - fmt.Sprintf(extractErrorMessage, errExt), - ) - } + projectId := model.ProjectId.ValueString() + region := model.Region.ValueString() + instanceId := model.InstanceId.ValueString() + databaseName := model.DatabaseName.ValueString() ctx = tflog.SetField(ctx, "project_id", projectId) ctx = tflog.SetField(ctx, "instance_id", instanceId) diff --git a/stackit/internal/services/sqlserverflexbeta/sqlserverflex_acc_test.go b/stackit/internal/services/sqlserverflexbeta/sqlserverflex_acc_test.go index 0f0b47ea..3beb0da8 100644 --- a/stackit/internal/services/sqlserverflexbeta/sqlserverflex_acc_test.go +++ b/stackit/internal/services/sqlserverflexbeta/sqlserverflex_acc_test.go @@ -302,9 +302,9 @@ func TestAccInstanceNoEncryption(t *testing.T) { "##STACKIT_DatabaseManager##", "##STACKIT_LoginManager##", "##STACKIT_ProcessManager##", - "##STACKIT_ServerManager##", "##STACKIT_SQLAgentManager##", "##STACKIT_SQLAgentUser##", + "##STACKIT_ServerManager##", }, }, } @@ -402,7 +402,7 @@ func TestAccInstanceEncryption(t *testing.T) { { Name: userName, ProjectId: os.Getenv("TF_ACC_PROJECT_ID"), - Roles: []string{"##STACKIT_LoginManager##", "##STACKIT_DatabaseManager##"}, + Roles: []string{"##STACKIT_DatabaseManager##", "##STACKIT_LoginManager##"}, }, } data.Databases = []Database{ diff --git a/stackit/internal/services/sqlserverflexbeta/user/mapper.go b/stackit/internal/services/sqlserverflexbeta/user/mapper.go index 54ba2547..ca916d28 100644 --- a/stackit/internal/services/sqlserverflexbeta/user/mapper.go +++ b/stackit/internal/services/sqlserverflexbeta/user/mapper.go @@ -94,8 +94,8 @@ func mapFields(userResp *sqlserverflexbeta.GetUserResponse, model *resourceModel model.Username = types.StringPointerValue(user.Username) // Map roles - if user.Roles != nil { - resRoles := *user.Roles + if userResp.Roles != nil { + resRoles := *userResp.Roles slices.Sort(resRoles) var roles []attr.Value @@ -103,11 +103,11 @@ func mapFields(userResp *sqlserverflexbeta.GetUserResponse, model *resourceModel roles = append(roles, types.StringValue(string(role))) } - rolesSet, diags := types.SetValue(types.StringType, roles) + rolesSet, diags := types.ListValue(types.StringType, roles) if diags.HasError() { return fmt.Errorf("failed to map roles: %w", core.DiagsToError(diags)) } - model.Roles = types.List(rolesSet) + model.Roles = rolesSet } // Ensure roles is not null @@ -153,11 +153,11 @@ func mapFieldsCreate(userResp *sqlserverflexbeta.CreateUserResponse, model *reso for _, role := range resRoles { roles = append(roles, types.StringValue(string(role))) } - rolesSet, diags := types.SetValue(types.StringType, roles) + rolesList, diags := types.ListValue(types.StringType, roles) if diags.HasError() { return fmt.Errorf("failed to map roles: %w", core.DiagsToError(diags)) } - model.Roles = types.List(rolesSet) + model.Roles = rolesList } if model.Roles.IsNull() || model.Roles.IsUnknown() { diff --git a/stackit/internal/services/sqlserverflexbeta/user/resource.go b/stackit/internal/services/sqlserverflexbeta/user/resource.go index 9159cf01..f960c726 100644 --- a/stackit/internal/services/sqlserverflexbeta/user/resource.go +++ b/stackit/internal/services/sqlserverflexbeta/user/resource.go @@ -11,7 +11,6 @@ import ( "strings" "time" - "github.com/hashicorp/terraform-plugin-framework/attr" "github.com/hashicorp/terraform-plugin-framework/path" "github.com/hashicorp/terraform-plugin-framework/resource" "github.com/hashicorp/terraform-plugin-framework/resource/identityschema" @@ -108,37 +107,25 @@ func (r *userResource) ModifyPlan( return } - // TODO: verify if this is needed - START - var configRoles []string - diags := configModel.Roles.ElementsAs(ctx, &configRoles, false) - resp.Diagnostics.Append(diags...) - if diags.HasError() { - return - } - - var planRoles []string - diags = planModel.Roles.ElementsAs(ctx, &planRoles, false) - resp.Diagnostics.Append(diags...) - if diags.HasError() { - return - } - - slices.Sort(configRoles) - slices.Sort(planRoles) - - if !slices.Equal(configRoles, planRoles) { - var roles []attr.Value - for _, role := range configRoles { - roles = append(roles, types.StringValue(string(role))) - } - rolesSet, diags := types.SetValue(types.StringType, roles) - resp.Diagnostics.Append(diags...) - if diags.HasError() { - return - } - planModel.Roles = types.List(rolesSet) - } - // TODO: verify if this is needed - END + //// TODO: verify if this is needed - START + //var planRoles []string + //diags := planModel.Roles.ElementsAs(ctx, &planRoles, false) + //resp.Diagnostics.Append(diags...) + //if diags.HasError() { + // return + //} + //slices.Sort(planRoles) + //var roles []attr.Value + //for _, role := range planRoles { + // roles = append(roles, types.StringValue(string(role))) + //} + //rolesSet, diags := types.ListValue(types.StringType, roles) + //resp.Diagnostics.Append(diags...) + //if diags.HasError() { + // return + //} + //planModel.Roles = rolesSet + //// TODO: verify if this is needed - END resp.Diagnostics.Append(resp.Plan.Set(ctx, planModel)...) if resp.Diagnostics.HasError() { diff --git a/stackit/provider.go b/stackit/provider.go index ab3dd060..bb9ffad0 100644 --- a/stackit/provider.go +++ b/stackit/provider.go @@ -489,17 +489,18 @@ func (p *Provider) Configure(ctx context.Context, req provider.ConfigureRequest, return } - roundTripper := core.NewRetryRoundTripper( - baseRoundTripper, - maxRetries, - initialDelay, - maxDelay, - perTryTimeout, - ) + //roundTripper := core.NewRetryRoundTripper( + // baseRoundTripper, + // maxRetries, + // initialDelay, + // maxDelay, + // perTryTimeout, + //) // Make round tripper and custom endpoints available during DataSource and Resource // type Configure methods. - providerData.RoundTripper = roundTripper + // providerData.RoundTripper = roundTripper + providerData.RoundTripper = baseRoundTripper resp.DataSourceData = providerData resp.ResourceData = providerData From 89a24ce78025300e28910ed74ea98945b89a4c4a Mon Sep 17 00:00:00 2001 From: "Marcel S. Henselin" Date: Mon, 16 Feb 2026 19:09:42 +0000 Subject: [PATCH 34/41] fix: try fix errors (#73) ## Description relates to #1234 ## Checklist - [ ] Issue was linked above - [ ] Code format was applied: `make fmt` - [ ] Examples were added / adjusted (see `examples/` directory) - [x] Docs are up-to-date: `make generate-docs` (will be checked by CI) - [ ] Unit tests got implemented or updated - [ ] Acceptance tests got implemented or updated (see e.g. [here](https://github.com/stackitcloud/terraform-provider-stackit/blob/f5f99d170996b208672ae684b6da53420e369563/stackit/internal/services/dns/dns_acc_test.go)) - [x] Unit tests are passing: `make test` (will be checked by CI) - [x] No linter issues: `make lint` (will be checked by CI) Reviewed-on: https://tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pulls/73 Co-authored-by: Marcel S. Henselin Co-committed-by: Marcel S. Henselin --- cmd/cmd/examplesCmd.go | 115 ++++++++ cmd/main.go | 3 +- go.mod | 32 +-- go.sum | 41 ++- .../sqlserverflex_acc_test.go | 4 +- .../sqlserverflexbeta/instance/functions.go | 17 +- .../instance/functions_test.go | 253 ++++++++++++++++++ .../sqlserverflexbeta/instance/resource.go | 6 + stackit/provider_acc_test.go | 2 + 9 files changed, 441 insertions(+), 32 deletions(-) create mode 100644 cmd/cmd/examplesCmd.go create mode 100644 stackit/internal/services/sqlserverflexbeta/instance/functions_test.go diff --git a/cmd/cmd/examplesCmd.go b/cmd/cmd/examplesCmd.go new file mode 100644 index 00000000..6c95a799 --- /dev/null +++ b/cmd/cmd/examplesCmd.go @@ -0,0 +1,115 @@ +package cmd + +import ( + "fmt" + "os" + "path" + + "github.com/spf13/cobra" +) + +var examplesCmd = &cobra.Command{ + Use: "examples", + Short: "create examples", + Long: `...`, + RunE: func(cmd *cobra.Command, args []string) error { + + //filePathStr := "stackit/internal/services/postgresflexalpha/database/datasources_gen/database_data_source_gen.go" + // + //src, err := os.ReadFile(filePathStr) + //if err != nil { + // return err + //} + // + //i := interp.New( + // interp.Options{ + // GoPath: "/home/henselinm/.asdf/installs/golang/1.25.6/packages", + // BuildTags: nil, + // Stdin: nil, + // Stdout: nil, + // Stderr: nil, + // Args: nil, + // Env: nil, + // SourcecodeFilesystem: nil, + // Unrestricted: false, + // }, + //) + //err = i.Use(i.Symbols("github.com/hashicorp/terraform-plugin-framework-validators")) + //if err != nil { + // return err + //} + //err = i.Use(stdlib.Symbols) + //if err != nil { + // return err + //} + //_, err = i.Eval(string(src)) + //if err != nil { + // return err + //} + // + //v, err := i.Eval("DatabaseDataSourceSchema") + //if err != nil { + // return err + //} + // + //bar := v.Interface().(func(string) string) + // + //r := bar("Kung") + //println(r) + // + //evalPath, err := i.EvalPath(filePathStr) + //if err != nil { + // return err + //} + // + //fmt.Printf("%+v\n", evalPath) + + //_, err = i.Eval(`import "fmt"`) + //if err != nil { + // return err + //} + //_, err = i.Eval(`func Hallo() { fmt.Println("Hi!") }`) + //if err != nil { + // return err + //} + + //v = i.Symbols("Hallo") + + // fmt.Println(v) + return workServices() + }, +} + +func workServices() error { + startPath := path.Join("stackit", "internal", "services") + + services, err := os.ReadDir(startPath) + if err != nil { + return err + } + + for _, entry := range services { + if !entry.IsDir() { + continue + } + resources, err := os.ReadDir(path.Join(startPath, entry.Name())) + if err != nil { + return err + } + for _, res := range resources { + if !res.IsDir() { + continue + } + fmt.Println("Gefunden:", startPath, "subdir", entry.Name(), "resource", res.Name()) + } + } + return nil +} + +func NewExamplesCmd() *cobra.Command { + return examplesCmd +} + +//func init() { // nolint: gochecknoinits +// examplesCmd.Flags().BoolVarP(&example, "example", "e", false, "example") +//} diff --git a/cmd/main.go b/cmd/main.go index 016d9a5b..19fb38e0 100644 --- a/cmd/main.go +++ b/cmd/main.go @@ -5,7 +5,7 @@ import ( "log/slog" "os" - "github.com/MatusOllah/slogcolor" + "github.com/SladkyCitron/slogcolor" cc "github.com/ivanpirog/coloredcobra" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/cmd/cmd" @@ -30,6 +30,7 @@ func main() { cmd.NewBuildCmd(), cmd.NewPublishCmd(), cmd.NewGetFieldsCmd(), + cmd.NewExamplesCmd(), ) err := rootCmd.Execute() diff --git a/go.mod b/go.mod index 24688013..9f74c926 100644 --- a/go.mod +++ b/go.mod @@ -3,7 +3,7 @@ module tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stac go 1.25.6 require ( - github.com/MatusOllah/slogcolor v1.7.0 + github.com/SladkyCitron/slogcolor v1.8.0 github.com/google/go-cmp v0.7.0 github.com/google/uuid v1.6.0 github.com/hashicorp/terraform-plugin-framework v1.17.0 @@ -16,7 +16,7 @@ require ( github.com/joho/godotenv v1.5.1 github.com/ldez/go-git-cmd-wrapper/v2 v2.9.1 github.com/spf13/cobra v1.10.2 - github.com/stackitcloud/stackit-sdk-go/core v0.21.0 + github.com/stackitcloud/stackit-sdk-go/core v0.21.1 github.com/stackitcloud/stackit-sdk-go/services/iaasalpha v0.1.23-alpha github.com/teambition/rrule-go v1.8.2 gopkg.in/yaml.v3 v3.0.1 @@ -24,7 +24,7 @@ require ( require ( github.com/hashicorp/go-retryablehttp v0.7.8 // indirect - golang.org/x/telemetry v0.0.0-20260116145544-c6413dc483f5 // indirect + golang.org/x/telemetry v0.0.0-20260213145524-e0ab670178e1 // indirect ) require ( @@ -32,9 +32,9 @@ require ( github.com/ProtonMail/go-crypto v1.3.0 // indirect github.com/agext/levenshtein v1.2.3 // indirect github.com/apparentlymart/go-textseg/v15 v15.0.0 // indirect - github.com/cloudflare/circl v1.6.2 // indirect + github.com/cloudflare/circl v1.6.3 // indirect github.com/fatih/color v1.18.0 // indirect - github.com/golang-jwt/jwt/v5 v5.3.0 // indirect + github.com/golang-jwt/jwt/v5 v5.3.1 // indirect github.com/golang/protobuf v1.5.4 // indirect github.com/hashicorp/errwrap v1.1.0 // indirect github.com/hashicorp/go-checkpoint v0.5.0 // indirect @@ -45,12 +45,12 @@ require ( github.com/hashicorp/go-plugin v1.7.0 // indirect github.com/hashicorp/go-uuid v1.0.3 // indirect github.com/hashicorp/go-version v1.8.0 // indirect - github.com/hashicorp/hc-install v0.9.2 // indirect + github.com/hashicorp/hc-install v0.9.3 // indirect github.com/hashicorp/hcl/v2 v2.24.0 // indirect github.com/hashicorp/logutils v1.0.0 // indirect - github.com/hashicorp/terraform-exec v0.24.0 // indirect + github.com/hashicorp/terraform-exec v0.25.0 // indirect github.com/hashicorp/terraform-json v0.27.2 // indirect - github.com/hashicorp/terraform-plugin-sdk/v2 v2.38.1 // indirect + github.com/hashicorp/terraform-plugin-sdk/v2 v2.38.2 // indirect github.com/hashicorp/terraform-registry-address v0.4.0 // indirect github.com/hashicorp/terraform-svchost v0.2.0 // indirect github.com/hashicorp/yamux v0.1.2 // indirect @@ -70,16 +70,16 @@ require ( github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect github.com/zclconf/go-cty v1.17.0 // indirect - golang.org/x/crypto v0.47.0 // indirect - golang.org/x/mod v0.32.0 // indirect - golang.org/x/net v0.49.0 // indirect + golang.org/x/crypto v0.48.0 // indirect + golang.org/x/mod v0.33.0 // indirect + golang.org/x/net v0.50.0 // indirect golang.org/x/sync v0.19.0 // indirect - golang.org/x/sys v0.40.0 // indirect - golang.org/x/text v0.33.0 // indirect - golang.org/x/tools v0.41.0 // indirect + golang.org/x/sys v0.41.0 // indirect + golang.org/x/text v0.34.0 // indirect + golang.org/x/tools v0.42.0 // indirect google.golang.org/appengine v1.6.8 // indirect - google.golang.org/genproto/googleapis/rpc v0.0.0-20260120221211-b8f7ae30c516 // indirect - google.golang.org/grpc v1.78.0 // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20260209200024-4cfbd4190f57 // indirect + google.golang.org/grpc v1.79.1 // indirect google.golang.org/protobuf v1.36.11 // indirect ) diff --git a/go.sum b/go.sum index 23dcc47b..ba4fe023 100644 --- a/go.sum +++ b/go.sum @@ -1,11 +1,11 @@ dario.cat/mergo v1.0.1 h1:Ra4+bf83h2ztPIQYNP99R6m+Y7KfnARDfID+a+vLl4s= dario.cat/mergo v1.0.1/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk= -github.com/MatusOllah/slogcolor v1.7.0 h1:Nrd7yBPv2EBEEBEwl7WEPRmMd1ozZzw2jm8SLMYDbKs= -github.com/MatusOllah/slogcolor v1.7.0/go.mod h1:5y1H50XuQIBvuYTJlmokWi+4FuPiJN5L7Z0jM4K4bYA= github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY= github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU= github.com/ProtonMail/go-crypto v1.3.0 h1:ILq8+Sf5If5DCpHQp4PbZdS1J7HDFRXz/+xKBiRGFrw= github.com/ProtonMail/go-crypto v1.3.0/go.mod h1:9whxjD8Rbs29b4XWbB8irEcE8KHMqaR2e7GWU1R+/PE= +github.com/SladkyCitron/slogcolor v1.8.0 h1:ln4mUPfVhs7a/vZfjnKkz5YZ71Bg/KFWneS2hfFq6FM= +github.com/SladkyCitron/slogcolor v1.8.0/go.mod h1:ft8LEVIl4isUkebakhv+ngNXJjWBumnwhXfxTLApf3M= github.com/agext/levenshtein v1.2.3 h1:YB2fHEn0UJagG8T1rrWknE3ZQzWM06O8AMAatNn7lmo= github.com/agext/levenshtein v1.2.3/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki2W0IB5558= github.com/apparentlymart/go-textseg/v12 v12.0.0/go.mod h1:S/4uRK2UtaQttw1GenVJEynmyUenKwP++x/+DdGV/Ec= @@ -15,6 +15,8 @@ github.com/bufbuild/protocompile v0.14.1 h1:iA73zAf/fyljNjQKwYzUHD6AD4R8KMasmwa/ github.com/bufbuild/protocompile v0.14.1/go.mod h1:ppVdAIhbr2H8asPk6k4pY7t9zB1OU5DoEw9xY/FUi1c= github.com/cloudflare/circl v1.6.2 h1:hL7VBpHHKzrV5WTfHCaBsgx/HGbBYlgrwvNXEVDYYsQ= github.com/cloudflare/circl v1.6.2/go.mod h1:2eXP6Qfat4O/Yhh8BznvKnJ+uzEoTQ6jVKJRn81BiS4= +github.com/cloudflare/circl v1.6.3 h1:9GPOhQGF9MCYUeXyMYlqTR6a5gTrgR/fBLXvUgtVcg8= +github.com/cloudflare/circl v1.6.3/go.mod h1:2eXP6Qfat4O/Yhh8BznvKnJ+uzEoTQ6jVKJRn81BiS4= github.com/cpuguy83/go-md2man/v2 v2.0.1/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g= github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= @@ -35,6 +37,7 @@ github.com/go-git/go-billy/v5 v5.6.2 h1:6Q86EsPXMa7c3YZ3aLAQsMA0VlWmy43r6FHqa/UN github.com/go-git/go-billy/v5 v5.6.2/go.mod h1:rcFC2rAsp/erv7CMz9GczHcuD0D32fWzH+MJAU+jaUU= github.com/go-git/go-git/v5 v5.14.0 h1:/MD3lCrGjCen5WfEAzKg00MJJffKhC8gzS80ycmCi60= github.com/go-git/go-git/v5 v5.14.0/go.mod h1:Z5Xhoia5PcWA3NF8vRLURn9E5FRhSl7dGj9ItW3Wk5k= +github.com/go-git/go-git/v5 v5.16.5 h1:mdkuqblwr57kVfXri5TTH+nMFLNUxIj9Z7F5ykFbw5s= github.com/go-logr/logr v1.4.3 h1:CjnDlHq8ikf6E492q6eKboGOC0T8CDaOvkHCIg8idEI= github.com/go-logr/logr v1.4.3/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= @@ -43,6 +46,8 @@ github.com/go-test/deep v1.0.3 h1:ZrJSEWsXzPOxaZnFteGEfooLba+ju3FYIbOrS+rQd68= github.com/go-test/deep v1.0.3/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA= github.com/golang-jwt/jwt/v5 v5.3.0 h1:pv4AsKCKKZuqlgs5sUmn4x8UlGa0kEVt/puTpKx9vvo= github.com/golang-jwt/jwt/v5 v5.3.0/go.mod h1:fxCRLWMO43lRc8nhHWY6LGqRcf+1gQWArsqaEUEa5bE= +github.com/golang-jwt/jwt/v5 v5.3.1 h1:kYf81DTWFe7t+1VvL7eS+jKFVWaUnK9cB1qbwn63YCY= +github.com/golang-jwt/jwt/v5 v5.3.1/go.mod h1:fxCRLWMO43lRc8nhHWY6LGqRcf+1gQWArsqaEUEa5bE= github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 h1:f+oWsMOmNPc8JmEHVZIycC7hBoQxHH9pNKQORJNozsQ= github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8/go.mod h1:wcDNUvekVysuuOpQKo3191zZyTpiI6se1N1ULghS0sw= github.com/golang/protobuf v1.1.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= @@ -81,12 +86,16 @@ github.com/hashicorp/go-version v1.8.0 h1:KAkNb1HAiZd1ukkxDFGmokVZe1Xy9HG6NUp+bP github.com/hashicorp/go-version v1.8.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= github.com/hashicorp/hc-install v0.9.2 h1:v80EtNX4fCVHqzL9Lg/2xkp62bbvQMnvPQ0G+OmtO24= github.com/hashicorp/hc-install v0.9.2/go.mod h1:XUqBQNnuT4RsxoxiM9ZaUk0NX8hi2h+Lb6/c0OZnC/I= +github.com/hashicorp/hc-install v0.9.3 h1:1H4dgmgzxEVwT6E/d/vIL5ORGVKz9twRwDw+qA5Hyho= +github.com/hashicorp/hc-install v0.9.3/go.mod h1:FQlQ5I3I/X409N/J1U4pPeQQz1R3BoV0IysB7aiaQE0= github.com/hashicorp/hcl/v2 v2.24.0 h1:2QJdZ454DSsYGoaE6QheQZjtKZSUs9Nh2izTWiwQxvE= github.com/hashicorp/hcl/v2 v2.24.0/go.mod h1:oGoO1FIQYfn/AgyOhlg9qLC6/nOJPX3qGbkZpYAcqfM= github.com/hashicorp/logutils v1.0.0 h1:dLEQVugN8vlakKOUE3ihGLTZJRB4j+M2cdTm/ORI65Y= github.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO+LraFDTW64= github.com/hashicorp/terraform-exec v0.24.0 h1:mL0xlk9H5g2bn0pPF6JQZk5YlByqSqrO5VoaNtAf8OE= github.com/hashicorp/terraform-exec v0.24.0/go.mod h1:lluc/rDYfAhYdslLJQg3J0oDqo88oGQAdHR+wDqFvo4= +github.com/hashicorp/terraform-exec v0.25.0 h1:Bkt6m3VkJqYh+laFMrWIpy9KHYFITpOyzRMNI35rNaY= +github.com/hashicorp/terraform-exec v0.25.0/go.mod h1:dl9IwsCfklDU6I4wq9/StFDp7dNbH/h5AnfS1RmiUl8= github.com/hashicorp/terraform-json v0.27.2 h1:BwGuzM6iUPqf9JYM/Z4AF1OJ5VVJEEzoKST/tRDBJKU= github.com/hashicorp/terraform-json v0.27.2/go.mod h1:GzPLJ1PLdUG5xL6xn1OXWIjteQRT2CNT9o/6A9mi9hE= github.com/hashicorp/terraform-plugin-framework v1.17.0 h1:JdX50CFrYcYFY31gkmitAEAzLKoBgsK+iaJjDC8OexY= @@ -99,6 +108,8 @@ github.com/hashicorp/terraform-plugin-log v0.10.0 h1:eu2kW6/QBVdN4P3Ju2WiB2W3Obj github.com/hashicorp/terraform-plugin-log v0.10.0/go.mod h1:/9RR5Cv2aAbrqcTSdNmY1NRHP4E3ekrXRGjqORpXyB0= github.com/hashicorp/terraform-plugin-sdk/v2 v2.38.1 h1:mlAq/OrMlg04IuJT7NpefI1wwtdpWudnEmjuQs04t/4= github.com/hashicorp/terraform-plugin-sdk/v2 v2.38.1/go.mod h1:GQhpKVvvuwzD79e8/NZ+xzj+ZpWovdPAe8nfV/skwNU= +github.com/hashicorp/terraform-plugin-sdk/v2 v2.38.2 h1:sy0Bc4A/GZNdmwpVX/Its9aIweCfY9fRfY1IgmXkOj8= +github.com/hashicorp/terraform-plugin-sdk/v2 v2.38.2/go.mod h1:MQisArXYCowb/5q4lDS/BWp5KnXiZ4lxOIyrpKBpUBE= github.com/hashicorp/terraform-plugin-testing v1.14.0 h1:5t4VKrjOJ0rg0sVuSJ86dz5K7PHsMO6OKrHFzDBerWA= github.com/hashicorp/terraform-plugin-testing v1.14.0/go.mod h1:1qfWkecyYe1Do2EEOK/5/WnTyvC8wQucUkkhiGLg5nk= github.com/hashicorp/terraform-registry-address v0.4.0 h1:S1yCGomj30Sao4l5BMPjTGZmCNzuv7/GDTDX99E9gTk= @@ -172,6 +183,8 @@ github.com/spf13/pflag v1.0.10 h1:4EBh2KAYBwaONj6b2Ye1GiHfwjqyROoF4RwYO+vPwFk= github.com/spf13/pflag v1.0.10/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= github.com/stackitcloud/stackit-sdk-go/core v0.21.0 h1:QXZqiaO7U/4IpTkJfzt4dt6QxJzG2uUS12mBnHpYNik= github.com/stackitcloud/stackit-sdk-go/core v0.21.0/go.mod h1:fqto7M82ynGhEnpZU6VkQKYWYoFG5goC076JWXTUPRQ= +github.com/stackitcloud/stackit-sdk-go/core v0.21.1 h1:Y/PcAgM7DPYMNqum0MLv4n1mF9ieuevzcCIZYQfm3Ts= +github.com/stackitcloud/stackit-sdk-go/core v0.21.1/go.mod h1:osMglDby4csGZ5sIfhNyYq1bS1TxIdPY88+skE/kkmI= github.com/stackitcloud/stackit-sdk-go/services/iaasalpha v0.1.23-alpha h1:ugpMOMUZGB0yXsWcfe97F7GCdjlexbjFuGD8ZeyMSts= github.com/stackitcloud/stackit-sdk-go/services/iaasalpha v0.1.23-alpha/go.mod h1:v5VGvTxLcCdJJmblbhqYalt/MFHcElDfYoy15CMhaWs= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= @@ -198,28 +211,39 @@ go.opentelemetry.io/auto/sdk v1.2.1 h1:jXsnJ4Lmnqd11kwkBV2LgLoFMZKizbCi5fNZ/ipaZ go.opentelemetry.io/auto/sdk v1.2.1/go.mod h1:KRTj+aOaElaLi+wW1kO/DZRXwkF4C5xPbEe3ZiIhN7Y= go.opentelemetry.io/otel v1.38.0 h1:RkfdswUDRimDg0m2Az18RKOsnI8UDzppJAtj01/Ymk8= go.opentelemetry.io/otel v1.38.0/go.mod h1:zcmtmQ1+YmQM9wrNsTGV/q/uyusom3P8RxwExxkZhjM= +go.opentelemetry.io/otel v1.39.0 h1:8yPrr/S0ND9QEfTfdP9V+SiwT4E0G7Y5MO7p85nis48= go.opentelemetry.io/otel/metric v1.38.0 h1:Kl6lzIYGAh5M159u9NgiRkmoMKjvbsKtYRwgfrA6WpA= go.opentelemetry.io/otel/metric v1.38.0/go.mod h1:kB5n/QoRM8YwmUahxvI3bO34eVtQf2i4utNVLr9gEmI= +go.opentelemetry.io/otel/metric v1.39.0 h1:d1UzonvEZriVfpNKEVmHXbdf909uGTOQjA0HF0Ls5Q0= go.opentelemetry.io/otel/sdk v1.38.0 h1:l48sr5YbNf2hpCUj/FoGhW9yDkl+Ma+LrVl8qaM5b+E= go.opentelemetry.io/otel/sdk v1.38.0/go.mod h1:ghmNdGlVemJI3+ZB5iDEuk4bWA3GkTpW+DOoZMYBVVg= +go.opentelemetry.io/otel/sdk v1.39.0 h1:nMLYcjVsvdui1B/4FRkwjzoRVsMK8uL/cj0OyhKzt18= go.opentelemetry.io/otel/sdk/metric v1.38.0 h1:aSH66iL0aZqo//xXzQLYozmWrXxyFkBJ6qT5wthqPoM= go.opentelemetry.io/otel/sdk/metric v1.38.0/go.mod h1:dg9PBnW9XdQ1Hd6ZnRz689CbtrUp0wMMs9iPcgT9EZA= +go.opentelemetry.io/otel/sdk/metric v1.39.0 h1:cXMVVFVgsIf2YL6QkRF4Urbr/aMInf+2WKg+sEJTtB8= go.opentelemetry.io/otel/trace v1.38.0 h1:Fxk5bKrDZJUH+AMyyIXGcFAPah0oRcT+LuNtJrmcNLE= go.opentelemetry.io/otel/trace v1.38.0/go.mod h1:j1P9ivuFsTceSWe1oY+EeW3sc+Pp42sO++GHkg4wwhs= +go.opentelemetry.io/otel/trace v1.39.0 h1:2d2vfpEDmCJ5zVYz7ijaJdOF59xLomrvj7bjt6/qCJI= go.yaml.in/yaml/v3 v3.0.4/go.mod h1:DhzuOOF2ATzADvBadXxruRBLzYTpT36CKvDb3+aBEFg= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.47.0 h1:V6e3FRj+n4dbpw86FJ8Fv7XVOql7TEwpHapKoMJ/GO8= golang.org/x/crypto v0.47.0/go.mod h1:ff3Y9VzzKbwSSEzWqJsJVBnWmRwRSHt/6Op5n9bQc4A= +golang.org/x/crypto v0.48.0 h1:/VRzVqiRSggnhY7gNRxPauEQ5Drw9haKdM0jqfcCFts= +golang.org/x/crypto v0.48.0/go.mod h1:r0kV5h3qnFPlQnBSrULhlsRfryS2pmewsg+XfMgkVos= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= golang.org/x/mod v0.32.0 h1:9F4d3PHLljb6x//jOyokMv3eX+YDeepZSEo3mFJy93c= golang.org/x/mod v0.32.0/go.mod h1:SgipZ/3h2Ci89DlEtEXWUk/HteuRin+HHhN+WbNhguU= +golang.org/x/mod v0.33.0 h1:tHFzIWbBifEmbwtGz65eaWyGiGZatSrT9prnU8DbVL8= +golang.org/x/mod v0.33.0/go.mod h1:swjeQEj+6r7fODbD2cqrnje9PnziFuw4bmLbBZFrQ5w= golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= golang.org/x/net v0.49.0 h1:eeHFmOGUTtaaPSGNmjBKpbng9MulQsJURQUAfUwY++o= golang.org/x/net v0.49.0/go.mod h1:/ysNB2EvaqvesRkuLAyjI1ycPZlQHM3q01F02UY/MV8= +golang.org/x/net v0.50.0 h1:ucWh9eiCGyDR3vtzso0WMQinm2Dnt8cFMuQa9K33J60= +golang.org/x/net v0.50.0/go.mod h1:UgoSli3F/pBgdJBHCTc+tp3gmrU4XswgGRgtnwWTfyM= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -238,23 +262,32 @@ golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.40.0 h1:DBZZqJ2Rkml6QMQsZywtnjnnGvHza6BTfYFWY9kjEWQ= golang.org/x/sys v0.40.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= +golang.org/x/sys v0.41.0 h1:Ivj+2Cp/ylzLiEU89QhWblYnOE9zerudt9Ftecq2C6k= +golang.org/x/sys v0.41.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= golang.org/x/telemetry v0.0.0-20260116145544-c6413dc483f5 h1:i0p03B68+xC1kD2QUO8JzDTPXCzhN56OLJ+IhHY8U3A= golang.org/x/telemetry v0.0.0-20260116145544-c6413dc483f5/go.mod h1:b7fPSJ0pKZ3ccUh8gnTONJxhn3c/PS6tyzQvyqw4iA8= +golang.org/x/telemetry v0.0.0-20260213145524-e0ab670178e1 h1:QNaHp8YvpPswfDNxlCmJyeesxbGOgaKf41iT9/QrErY= +golang.org/x/telemetry v0.0.0-20260213145524-e0ab670178e1/go.mod h1:NuITXsA9cTiqnXtVk+/wrBT2Ja4X5hsfGOYRJ6kgYjs= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/term v0.39.0 h1:RclSuaJf32jOqZz74CkPA9qFuVTX7vhLlpfj/IGWlqY= golang.org/x/term v0.39.0/go.mod h1:yxzUCTP/U+FzoxfdKmLaA0RV1WgE0VY7hXBwKtY/4ww= +golang.org/x/term v0.40.0 h1:36e4zGLqU4yhjlmxEaagx2KuYbJq3EwY8K943ZsHcvg= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ= golang.org/x/text v0.33.0 h1:B3njUFyqtHDUI5jMn1YIr5B0IE2U0qck04r6d4KPAxE= golang.org/x/text v0.33.0/go.mod h1:LuMebE6+rBincTi9+xWTY8TztLzKHc/9C1uBCG27+q8= +golang.org/x/text v0.34.0 h1:oL/Qq0Kdaqxa1KbNeMKwQq0reLCCaFtqu2eNuSeNHbk= +golang.org/x/text v0.34.0/go.mod h1:homfLqTYRFyVYemLBFl5GgL/DWEiH5wcsQ5gSh1yziA= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= golang.org/x/tools v0.41.0 h1:a9b8iMweWG+S0OBnlU36rzLp20z1Rp10w+IY2czHTQc= golang.org/x/tools v0.41.0/go.mod h1:XSY6eDqxVNiYgezAVqqCeihT4j1U2CCsqvH3WhQpnlg= +golang.org/x/tools v0.42.0 h1:uNgphsn75Tdz5Ji2q36v/nsFSfR/9BRFvqhGBaJGd5k= +golang.org/x/tools v0.42.0/go.mod h1:Ma6lCIwGZvHK6XtgbswSoWroEkhugApmsXyrUmBhfr0= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= gonum.org/v1/gonum v0.16.0 h1:5+ul4Swaf3ESvrOnidPp4GZbzf0mxVQpDCYUQE7OJfk= @@ -264,8 +297,12 @@ google.golang.org/appengine v1.6.8 h1:IhEN5q69dyKagZPYMSdIjS2HqprW324FRQZJcGqPAs google.golang.org/appengine v1.6.8/go.mod h1:1jJ3jBArFh5pcgW8gCtRJnepW8FzD1V44FJffLiz/Ds= google.golang.org/genproto/googleapis/rpc v0.0.0-20260120221211-b8f7ae30c516 h1:sNrWoksmOyF5bvJUcnmbeAmQi8baNhqg5IWaI3llQqU= google.golang.org/genproto/googleapis/rpc v0.0.0-20260120221211-b8f7ae30c516/go.mod h1:j9x/tPzZkyxcgEFkiKEEGxfvyumM01BEtsW8xzOahRQ= +google.golang.org/genproto/googleapis/rpc v0.0.0-20260209200024-4cfbd4190f57 h1:mWPCjDEyshlQYzBpMNHaEof6UX1PmHcaUODUywQ0uac= +google.golang.org/genproto/googleapis/rpc v0.0.0-20260209200024-4cfbd4190f57/go.mod h1:j9x/tPzZkyxcgEFkiKEEGxfvyumM01BEtsW8xzOahRQ= google.golang.org/grpc v1.78.0 h1:K1XZG/yGDJnzMdd/uZHAkVqJE+xIDOcmdSFZkBUicNc= google.golang.org/grpc v1.78.0/go.mod h1:I47qjTo4OKbMkjA/aOOwxDIiPSBofUtQUI5EfpWvW7U= +google.golang.org/grpc v1.79.1 h1:zGhSi45ODB9/p3VAawt9a+O/MULLl9dpizzNNpq7flY= +google.golang.org/grpc v1.79.1/go.mod h1:KmT0Kjez+0dde/v2j9vzwoAScgEPx/Bw1CYChhHLrHQ= google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= google.golang.org/protobuf v1.36.11 h1:fV6ZwhNocDyBLK0dj+fg8ektcVegBBuEolpbTQyBNVE= diff --git a/stackit/internal/services/sqlserverflexalpha/sqlserverflex_acc_test.go b/stackit/internal/services/sqlserverflexalpha/sqlserverflex_acc_test.go index d638bae2..8658f0ce 100644 --- a/stackit/internal/services/sqlserverflexalpha/sqlserverflex_acc_test.go +++ b/stackit/internal/services/sqlserverflexalpha/sqlserverflex_acc_test.go @@ -241,9 +241,9 @@ func TestAccInstanceNoEncryption(t *testing.T) { "##STACKIT_DatabaseManager##", "##STACKIT_LoginManager##", "##STACKIT_ProcessManager##", - "##STACKIT_ServerManager##", "##STACKIT_SQLAgentManager##", "##STACKIT_SQLAgentUser##", + "##STACKIT_ServerManager##", }, }, } @@ -341,7 +341,7 @@ func TestAccInstanceEncryption(t *testing.T) { { Name: userName, ProjectId: os.Getenv("TF_ACC_PROJECT_ID"), - Roles: []string{"##STACKIT_LoginManager##", "##STACKIT_DatabaseManager##"}, + Roles: []string{"##STACKIT_DatabaseManager##", "##STACKIT_LoginManager##"}, }, } data.Databases = []Database{ diff --git a/stackit/internal/services/sqlserverflexbeta/instance/functions.go b/stackit/internal/services/sqlserverflexbeta/instance/functions.go index 3893279a..a82eef23 100644 --- a/stackit/internal/services/sqlserverflexbeta/instance/functions.go +++ b/stackit/internal/services/sqlserverflexbeta/instance/functions.go @@ -208,17 +208,12 @@ func toCreatePayload( } var encryptionPayload *sqlserverflexbeta.CreateInstanceRequestPayloadGetEncryptionArgType = nil - if !model.Encryption.IsNull() && !model.Encryption.IsUnknown() && - !model.Encryption.KekKeyId.IsNull() && model.Encryption.KekKeyId.IsUnknown() && model.Encryption.KekKeyId.ValueString() != "" && - !model.Encryption.KekKeyRingId.IsNull() && !model.Encryption.KekKeyRingId.IsUnknown() && model.Encryption.KekKeyRingId.ValueString() != "" && - !model.Encryption.KekKeyVersion.IsNull() && !model.Encryption.KekKeyVersion.IsUnknown() && model.Encryption.KekKeyVersion.ValueString() != "" && - !model.Encryption.ServiceAccount.IsNull() && !model.Encryption.ServiceAccount.IsUnknown() && model.Encryption.ServiceAccount.ValueString() != "" { - encryptionPayload = &sqlserverflexbeta.CreateInstanceRequestPayloadGetEncryptionArgType{ - KekKeyId: model.Encryption.KekKeyId.ValueStringPointer(), - KekKeyRingId: model.Encryption.KekKeyVersion.ValueStringPointer(), - KekKeyVersion: model.Encryption.KekKeyRingId.ValueStringPointer(), - ServiceAccount: model.Encryption.ServiceAccount.ValueStringPointer(), - } + if !model.Encryption.IsNull() && !model.Encryption.IsUnknown() { + encryptionPayload = &sqlserverflexbeta.CreateInstanceRequestPayloadGetEncryptionArgType{} + encryptionPayload.KekKeyId = model.Encryption.KekKeyId.ValueStringPointer() + encryptionPayload.KekKeyRingId = model.Encryption.KekKeyRingId.ValueStringPointer() + encryptionPayload.KekKeyVersion = model.Encryption.KekKeyVersion.ValueStringPointer() + encryptionPayload.ServiceAccount = model.Encryption.ServiceAccount.ValueStringPointer() } networkPayload := &sqlserverflexbeta.CreateInstanceRequestPayloadGetNetworkArgType{} diff --git a/stackit/internal/services/sqlserverflexbeta/instance/functions_test.go b/stackit/internal/services/sqlserverflexbeta/instance/functions_test.go new file mode 100644 index 00000000..bf36c40e --- /dev/null +++ b/stackit/internal/services/sqlserverflexbeta/instance/functions_test.go @@ -0,0 +1,253 @@ +package sqlserverflexbeta + +import ( + "context" + "reflect" + "testing" + + "github.com/google/go-cmp/cmp" + "github.com/hashicorp/terraform-plugin-framework/attr" + "github.com/hashicorp/terraform-plugin-framework/diag" + "github.com/hashicorp/terraform-plugin-framework/resource" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/stackitcloud/stackit-sdk-go/core/utils" + sqlserverflexbetaPkgGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta" + sqlserverflexbetaRs "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/instance/resources_gen" +) + +func Test_handleDSEncryption(t *testing.T) { + type args struct { + m *dataSourceModel + resp *sqlserverflexbetaPkgGen.GetInstanceResponse + } + tests := []struct { + name string + args args + want sqlserverflexbetaRs.EncryptionValue + }{ + // TODO: Add test cases. + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if got := handleDSEncryption(tt.args.m, tt.args.resp); !reflect.DeepEqual(got, tt.want) { + t.Errorf("handleDSEncryption() = %v, want %v", got, tt.want) + } + }) + } +} + +func Test_handleEncryption(t *testing.T) { + type args struct { + m *sqlserverflexbetaRs.InstanceModel + resp *sqlserverflexbetaPkgGen.GetInstanceResponse + } + tests := []struct { + name string + args args + want sqlserverflexbetaRs.EncryptionValue + }{ + { + name: "nil response", + args: args{ + m: &sqlserverflexbetaRs.InstanceModel{}, + resp: &sqlserverflexbetaPkgGen.GetInstanceResponse{}, + }, + want: sqlserverflexbetaRs.EncryptionValue{}, + }, + { + name: "nil response", + args: args{ + m: &sqlserverflexbetaRs.InstanceModel{}, + resp: &sqlserverflexbetaPkgGen.GetInstanceResponse{ + Encryption: &sqlserverflexbetaPkgGen.InstanceEncryption{}, + }, + }, + want: sqlserverflexbetaRs.NewEncryptionValueNull(), + }, + { + name: "response with values", + args: args{ + m: &sqlserverflexbetaRs.InstanceModel{}, + resp: &sqlserverflexbetaPkgGen.GetInstanceResponse{ + Encryption: &sqlserverflexbetaPkgGen.InstanceEncryption{ + KekKeyId: utils.Ptr("kek_key_id"), + KekKeyRingId: utils.Ptr("kek_key_ring_id"), + KekKeyVersion: utils.Ptr("kek_key_version"), + ServiceAccount: utils.Ptr("kek_svc_acc"), + }, + }, + }, + want: sqlserverflexbetaRs.EncryptionValue{ + KekKeyId: types.StringValue("kek_key_id"), + KekKeyRingId: types.StringValue("kek_key_ring_id"), + KekKeyVersion: types.StringValue("kek_key_version"), + ServiceAccount: types.StringValue("kek_svc_acc"), + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if got := handleEncryption(tt.args.m, tt.args.resp); !reflect.DeepEqual(got, tt.want) { + t.Errorf("handleEncryption() = %v, want %v", got, tt.want) + } + }) + } +} + +func Test_mapDataResponseToModel(t *testing.T) { + type args struct { + ctx context.Context + resp *sqlserverflexbetaPkgGen.GetInstanceResponse + m *dataSourceModel + tfDiags diag.Diagnostics + } + tests := []struct { + name string + args args + wantErr bool + }{ + // TODO: Add test cases. + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if err := mapDataResponseToModel(tt.args.ctx, tt.args.resp, tt.args.m, tt.args.tfDiags); (err != nil) != tt.wantErr { + t.Errorf("mapDataResponseToModel() error = %v, wantErr %v", err, tt.wantErr) + } + }) + } +} + +func Test_mapResponseToModel(t *testing.T) { + type args struct { + ctx context.Context + resp *sqlserverflexbetaPkgGen.GetInstanceResponse + m *sqlserverflexbetaRs.InstanceModel + tfDiags diag.Diagnostics + } + tests := []struct { + name string + args args + wantErr bool + }{ + // TODO: Add test cases. + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if err := mapResponseToModel(tt.args.ctx, tt.args.resp, tt.args.m, tt.args.tfDiags); (err != nil) != tt.wantErr { + t.Errorf("mapResponseToModel() error = %v, wantErr %v", err, tt.wantErr) + } + }) + } +} + +func Test_toCreatePayload(t *testing.T) { + type args struct { + ctx context.Context + model *sqlserverflexbetaRs.InstanceModel + } + tests := []struct { + name string + args args + want *sqlserverflexbetaPkgGen.CreateInstanceRequestPayload + wantErr bool + }{ + { + name: "simple", + args: args{ + ctx: context.Background(), + model: &sqlserverflexbetaRs.InstanceModel{ + Encryption: sqlserverflexbetaRs.NewEncryptionValueMust( + sqlserverflexbetaRs.EncryptionValue{}.AttributeTypes(context.Background()), + map[string]attr.Value{ + "kek_key_id": types.StringValue("kek_key_id"), + "kek_key_ring_id": types.StringValue("kek_key_ring_id"), + "kek_key_version": types.StringValue("kek_key_version"), + "service_account": types.StringValue("sacc"), + }, + ), + Storage: sqlserverflexbetaRs.StorageValue{}, + }, + }, + want: &sqlserverflexbetaPkgGen.CreateInstanceRequestPayload{ + BackupSchedule: nil, + Encryption: &sqlserverflexbetaPkgGen.InstanceEncryption{ + KekKeyId: utils.Ptr("kek_key_id"), + KekKeyRingId: utils.Ptr("kek_key_ring_id"), + KekKeyVersion: utils.Ptr("kek_key_version"), + ServiceAccount: utils.Ptr("sacc"), + }, + FlavorId: nil, + Name: nil, + Network: &sqlserverflexbetaPkgGen.CreateInstanceRequestPayloadNetwork{}, + RetentionDays: nil, + Storage: &sqlserverflexbetaPkgGen.CreateInstanceRequestPayloadGetStorageArgType{}, + Version: nil, + }, + wantErr: false, + }, + { + name: "nil object", + args: args{ + ctx: context.Background(), + model: &sqlserverflexbetaRs.InstanceModel{ + Encryption: sqlserverflexbetaRs.NewEncryptionValueNull(), + Storage: sqlserverflexbetaRs.StorageValue{}, + }, + }, + want: &sqlserverflexbetaPkgGen.CreateInstanceRequestPayload{ + BackupSchedule: nil, + Encryption: nil, + FlavorId: nil, + Name: nil, + Network: &sqlserverflexbetaPkgGen.CreateInstanceRequestPayloadNetwork{}, + RetentionDays: nil, + Storage: &sqlserverflexbetaPkgGen.CreateInstanceRequestPayloadGetStorageArgType{}, + Version: nil, + }, + wantErr: false, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := toCreatePayload(tt.args.ctx, tt.args.model) + if (err != nil) != tt.wantErr { + t.Errorf("toCreatePayload() error = %v, wantErr %v", err, tt.wantErr) + return + } + if diff := cmp.Diff(tt.want, got); diff != "" { + t.Errorf("model mismatch (-want +got):\n%s", diff) + } + //if !reflect.DeepEqual(got, tt.want) { + // t.Errorf("toCreatePayload() got = %v, want %v", got, tt.want) + //} + }) + } +} + +func Test_toUpdatePayload(t *testing.T) { + type args struct { + ctx context.Context + m *sqlserverflexbetaRs.InstanceModel + resp *resource.UpdateResponse + } + tests := []struct { + name string + args args + want *sqlserverflexbetaPkgGen.UpdateInstanceRequestPayload + wantErr bool + }{ + // TODO: Add test cases. + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := toUpdatePayload(tt.args.ctx, tt.args.m, tt.args.resp) + if (err != nil) != tt.wantErr { + t.Errorf("toUpdatePayload() error = %v, wantErr %v", err, tt.wantErr) + return + } + if !reflect.DeepEqual(got, tt.want) { + t.Errorf("toUpdatePayload() got = %v, want %v", got, tt.want) + } + }) + } +} diff --git a/stackit/internal/services/sqlserverflexbeta/instance/resource.go b/stackit/internal/services/sqlserverflexbeta/instance/resource.go index 404f665b..6cc07de1 100644 --- a/stackit/internal/services/sqlserverflexbeta/instance/resource.go +++ b/stackit/internal/services/sqlserverflexbeta/instance/resource.go @@ -207,6 +207,12 @@ func (r *instanceResource) Create(ctx context.Context, req resource.CreateReques ) return } + + fmt.Printf("\n\n\n >>>>>> DEBUG <<<<< \n%+v\n\n\n", payload) + tflog.Debug(ctx, "\n\n\n >>>>>> DEBUG <<<<< \n\n\n", map[string]interface{}{ + "encryption": payload.Encryption, + }) + // Create new Instance createResp, err := r.client.CreateInstanceRequest( ctx, diff --git a/stackit/provider_acc_test.go b/stackit/provider_acc_test.go index 0e92fa5a..854f00a7 100644 --- a/stackit/provider_acc_test.go +++ b/stackit/provider_acc_test.go @@ -57,6 +57,7 @@ func TestMain(m *testing.M) { } func TestAccEnvVarServiceAccountPathValid(t *testing.T) { + t.Skip("needs refactoring") // Check if acceptance tests should be run if v := os.Getenv(resource.EnvTfAcc); v == "" { t.Skipf( @@ -157,6 +158,7 @@ func TestAccProviderConfigureValidValues(t *testing.T) { } func TestAccProviderConfigureAnInvalidValue(t *testing.T) { + t.Skip("needs refactoring") // Check if acceptance tests should be run if v := os.Getenv(resource.EnvTfAcc); v == "" { t.Skipf( From aba831cbddc0e97ef3034f65cc539bd52eec2c48 Mon Sep 17 00:00:00 2001 From: "Marcel S. Henselin" Date: Mon, 16 Feb 2026 19:49:02 +0000 Subject: [PATCH 35/41] fix: some_fixes (#74) ## Description relates to #1234 ## Checklist - [ ] Issue was linked above - [ ] Code format was applied: `make fmt` - [ ] Examples were added / adjusted (see `examples/` directory) - [x] Docs are up-to-date: `make generate-docs` (will be checked by CI) - [ ] Unit tests got implemented or updated - [ ] Acceptance tests got implemented or updated (see e.g. [here](https://github.com/stackitcloud/terraform-provider-stackit/blob/f5f99d170996b208672ae684b6da53420e369563/stackit/internal/services/dns/dns_acc_test.go)) - [x] Unit tests are passing: `make test` (will be checked by CI) - [x] No linter issues: `make lint` (will be checked by CI) Reviewed-on: https://tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pulls/74 Co-authored-by: Marcel S. Henselin Co-committed-by: Marcel S. Henselin --- cmd/cmd/publish/templates/Caddyfile | 38 ++ docs/data-sources/sqlserverflexbeta_user.md | 54 ++ docs/resources/sqlserverflexbeta_user.md | 38 ++ .../postgresflexalpha/database/resource.go | 1 + .../postgresflexalpha/instance/functions.go | 23 - .../postgresflexalpha/user/planModifiers.yaml | 9 + .../version_data_source_gen.go | 569 ++++++++++++++++++ .../database/planModifiers.yaml | 5 + .../sqlserverflexbeta/database/resource.go | 2 +- .../sqlserverflexbeta/instance/resource.go | 5 - 10 files changed, 715 insertions(+), 29 deletions(-) create mode 100644 cmd/cmd/publish/templates/Caddyfile create mode 100644 docs/data-sources/sqlserverflexbeta_user.md create mode 100644 docs/resources/sqlserverflexbeta_user.md create mode 100644 stackit/internal/services/sqlserverflexalpha/versions/datasources_gen/version_data_source_gen.go diff --git a/cmd/cmd/publish/templates/Caddyfile b/cmd/cmd/publish/templates/Caddyfile new file mode 100644 index 00000000..5663fbf8 --- /dev/null +++ b/cmd/cmd/publish/templates/Caddyfile @@ -0,0 +1,38 @@ +{ + log { + level debug + } + + + filesystem tf s3 { + bucket "terraform-provider-privatepreview" + region eu01 + endpoint https://object.storage.eu01.onstackit.cloud + use_path_style + } +} + +tfregistry.sysops.stackit.rocks { + encode zstd gzip + + handle_path /docs/* { + root /srv/www + templates + + @md { + file {path} + path *.md + } + + rewrite @md /markdown.html + + file_server { + browse + } + } + + file_server { + fs tf + browse + } +} diff --git a/docs/data-sources/sqlserverflexbeta_user.md b/docs/data-sources/sqlserverflexbeta_user.md new file mode 100644 index 00000000..f87f454e --- /dev/null +++ b/docs/data-sources/sqlserverflexbeta_user.md @@ -0,0 +1,54 @@ +--- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "stackitprivatepreview_sqlserverflexbeta_user Data Source - stackitprivatepreview" +subcategory: "" +description: |- + +--- + +# stackitprivatepreview_sqlserverflexbeta_user (Data Source) + + + + + + +## Schema + +### Required + +- `instance_id` (String) The ID of the instance. +- `project_id` (String) The STACKIT project ID. +- `region` (String) The region which should be addressed + +### Optional + +- `page` (Number) Number of the page of items list to be returned. +- `size` (Number) Number of items to be returned on each page. +- `sort` (String) Sorting of the users to be returned on each page. + +### Read-Only + +- `pagination` (Attributes) (see [below for nested schema](#nestedatt--pagination)) +- `users` (Attributes List) List of all users inside an instance (see [below for nested schema](#nestedatt--users)) + + +### Nested Schema for `pagination` + +Read-Only: + +- `page` (Number) +- `size` (Number) +- `sort` (String) +- `total_pages` (Number) +- `total_rows` (Number) + + + +### Nested Schema for `users` + +Read-Only: + +- `status` (String) The current status of the user. +- `tf_original_api_id` (Number) The ID of the user. +- `username` (String) The name of the user. diff --git a/docs/resources/sqlserverflexbeta_user.md b/docs/resources/sqlserverflexbeta_user.md new file mode 100644 index 00000000..ea1577a8 --- /dev/null +++ b/docs/resources/sqlserverflexbeta_user.md @@ -0,0 +1,38 @@ +--- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "stackitprivatepreview_sqlserverflexbeta_user Resource - stackitprivatepreview" +subcategory: "" +description: |- + +--- + +# stackitprivatepreview_sqlserverflexbeta_user (Resource) + + + + + + +## Schema + +### Required + +- `roles` (List of String) A list containing the user roles for the instance. A list with the valid user roles can be retrieved using the List Roles endpoint. +- `username` (String) The name of the user. + +### Optional + +- `default_database` (String) The default database for a user of the instance. +- `instance_id` (String) The ID of the instance. +- `project_id` (String) The STACKIT project ID. +- `region` (String) The region which should be addressed +- `user_id` (Number) The ID of the user. + +### Read-Only + +- `host` (String) The host of the instance in which the user belongs to. +- `id` (Number) The ID of the user. +- `password` (String) The password for the user. +- `port` (Number) The port of the instance in which the user belongs to. +- `status` (String) The current status of the user. +- `uri` (String) The connection string for the user to the instance. diff --git a/stackit/internal/services/postgresflexalpha/database/resource.go b/stackit/internal/services/postgresflexalpha/database/resource.go index 1cf3f2df..4e3dc936 100644 --- a/stackit/internal/services/postgresflexalpha/database/resource.go +++ b/stackit/internal/services/postgresflexalpha/database/resource.go @@ -321,6 +321,7 @@ func (r *databaseResource) Read( return } + // TODO: use values from api to identify drift // Save identity into Terraform state identity := DatabaseResourceIdentityModel{ ProjectID: types.StringValue(projectId), diff --git a/stackit/internal/services/postgresflexalpha/instance/functions.go b/stackit/internal/services/postgresflexalpha/instance/functions.go index eafbb3b5..133be314 100644 --- a/stackit/internal/services/postgresflexalpha/instance/functions.go +++ b/stackit/internal/services/postgresflexalpha/instance/functions.go @@ -7,7 +7,6 @@ import ( "github.com/hashicorp/terraform-plugin-framework/attr" "github.com/hashicorp/terraform-plugin-framework/types" "github.com/hashicorp/terraform-plugin-framework/types/basetypes" - "github.com/hashicorp/terraform-plugin-log/tflog" postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha" postgresflexalphadatasource "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/instance/datasources_gen" @@ -20,28 +19,6 @@ func mapGetInstanceResponseToModel( m *postgresflexalpharesource.InstanceModel, resp *postgresflex.GetInstanceResponse, ) error { - tflog.Debug( - ctx, ">>>> MSH DEBUG <<<<", map[string]interface{}{ - "id": m.Id.ValueString(), - "instance_id": m.InstanceId.ValueString(), - "backup_schedule": m.BackupSchedule.ValueString(), - "flavor_id": m.FlavorId.ValueString(), - "encryption.kek_key_id": m.Encryption.KekKeyId.ValueString(), - "encryption.kek_key_ring_id": m.Encryption.KekKeyRingId.ValueString(), - "encryption.kek_key_version": m.Encryption.KekKeyVersion.ValueString(), - "encryption.service_account": m.Encryption.ServiceAccount.ValueString(), - "is_deletable": m.IsDeletable.ValueBool(), - "name": m.Name.ValueString(), - "status": m.Status.ValueString(), - "retention_days": m.RetentionDays.ValueInt64(), - "replicas": m.Replicas.ValueInt64(), - "network.instance_address": m.Network.InstanceAddress.ValueString(), - "network.router_address": m.Network.RouterAddress.ValueString(), - "version": m.Version.ValueString(), - "network.acl": m.Network.Acl.String(), - }, - ) - m.BackupSchedule = types.StringValue(resp.GetBackupSchedule()) m.Encryption = postgresflexalpharesource.NewEncryptionValueNull() if resp.HasEncryption() { diff --git a/stackit/internal/services/postgresflexalpha/user/planModifiers.yaml b/stackit/internal/services/postgresflexalpha/user/planModifiers.yaml index a7d4cde6..e0822704 100644 --- a/stackit/internal/services/postgresflexalpha/user/planModifiers.yaml +++ b/stackit/internal/services/postgresflexalpha/user/planModifiers.yaml @@ -2,10 +2,12 @@ fields: - name: 'id' modifiers: - 'UseStateForUnknown' + - 'RequiresReplace' - name: 'user_id' modifiers: - 'UseStateForUnknown' + - 'RequiresReplace' - name: 'instance_id' validators: @@ -13,6 +15,7 @@ fields: - validate.UUID modifiers: - 'UseStateForUnknown' + - 'RequiresReplace' - name: 'project_id' validators: @@ -32,24 +35,30 @@ fields: - name: 'password' modifiers: + - 'RequiresReplace' - 'UseStateForUnknown' - name: 'host' modifiers: + - 'RequiresReplace' - 'UseStateForUnknown' - name: 'port' modifiers: + - 'RequiresReplace' - 'UseStateForUnknown' - name: 'region' modifiers: - 'RequiresReplace' + - 'RequiresReplace' - name: 'status' modifiers: + - 'RequiresReplace' - 'UseStateForUnknown' - name: 'connection_string' modifiers: + - 'RequiresReplace' - 'UseStateForUnknown' diff --git a/stackit/internal/services/sqlserverflexalpha/versions/datasources_gen/version_data_source_gen.go b/stackit/internal/services/sqlserverflexalpha/versions/datasources_gen/version_data_source_gen.go new file mode 100644 index 00000000..cb9008f1 --- /dev/null +++ b/stackit/internal/services/sqlserverflexalpha/versions/datasources_gen/version_data_source_gen.go @@ -0,0 +1,569 @@ +// Code generated by terraform-plugin-framework-generator DO NOT EDIT. + +package sqlserverflexalpha + +import ( + "context" + "fmt" + "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator" + "github.com/hashicorp/terraform-plugin-framework/attr" + "github.com/hashicorp/terraform-plugin-framework/diag" + "github.com/hashicorp/terraform-plugin-framework/schema/validator" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/hashicorp/terraform-plugin-framework/types/basetypes" + "github.com/hashicorp/terraform-plugin-go/tftypes" + "strings" + + "github.com/hashicorp/terraform-plugin-framework/datasource/schema" +) + +func VersionDataSourceSchema(ctx context.Context) schema.Schema { + return schema.Schema{ + Attributes: map[string]schema.Attribute{ + "project_id": schema.StringAttribute{ + Required: true, + Description: "The STACKIT project ID.", + MarkdownDescription: "The STACKIT project ID.", + }, + "region": schema.StringAttribute{ + Required: true, + Description: "The region which should be addressed", + MarkdownDescription: "The region which should be addressed", + Validators: []validator.String{ + stringvalidator.OneOf( + "eu01", + ), + }, + }, + "versions": schema.ListNestedAttribute{ + NestedObject: schema.NestedAttributeObject{ + Attributes: map[string]schema.Attribute{ + "beta": schema.BoolAttribute{ + Computed: true, + Description: "Flag if the version is a beta version. If set the version may contain bugs and is not fully tested.", + MarkdownDescription: "Flag if the version is a beta version. If set the version may contain bugs and is not fully tested.", + }, + "deprecated": schema.StringAttribute{ + Computed: true, + Description: "Timestamp in RFC3339 format which says when the version will no longer be supported by STACKIT.", + MarkdownDescription: "Timestamp in RFC3339 format which says when the version will no longer be supported by STACKIT.", + }, + "recommend": schema.BoolAttribute{ + Computed: true, + Description: "Flag if the version is recommend by the STACKIT Team.", + MarkdownDescription: "Flag if the version is recommend by the STACKIT Team.", + }, + "version": schema.StringAttribute{ + Computed: true, + Description: "The sqlserver version used for the instance.", + MarkdownDescription: "The sqlserver version used for the instance.", + }, + }, + CustomType: VersionsType{ + ObjectType: types.ObjectType{ + AttrTypes: VersionsValue{}.AttributeTypes(ctx), + }, + }, + }, + Computed: true, + Description: "A list containing available sqlserver versions.", + MarkdownDescription: "A list containing available sqlserver versions.", + }, + }, + } +} + +type VersionModel struct { + ProjectId types.String `tfsdk:"project_id"` + Region types.String `tfsdk:"region"` + Versions types.List `tfsdk:"versions"` +} + +var _ basetypes.ObjectTypable = VersionsType{} + +type VersionsType struct { + basetypes.ObjectType +} + +func (t VersionsType) Equal(o attr.Type) bool { + other, ok := o.(VersionsType) + + if !ok { + return false + } + + return t.ObjectType.Equal(other.ObjectType) +} + +func (t VersionsType) String() string { + return "VersionsType" +} + +func (t VersionsType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) { + var diags diag.Diagnostics + + attributes := in.Attributes() + + betaAttribute, ok := attributes["beta"] + + if !ok { + diags.AddError( + "Attribute Missing", + `beta is missing from object`) + + return nil, diags + } + + betaVal, ok := betaAttribute.(basetypes.BoolValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`beta expected to be basetypes.BoolValue, was: %T`, betaAttribute)) + } + + deprecatedAttribute, ok := attributes["deprecated"] + + if !ok { + diags.AddError( + "Attribute Missing", + `deprecated is missing from object`) + + return nil, diags + } + + deprecatedVal, ok := deprecatedAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`deprecated expected to be basetypes.StringValue, was: %T`, deprecatedAttribute)) + } + + recommendAttribute, ok := attributes["recommend"] + + if !ok { + diags.AddError( + "Attribute Missing", + `recommend is missing from object`) + + return nil, diags + } + + recommendVal, ok := recommendAttribute.(basetypes.BoolValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`recommend expected to be basetypes.BoolValue, was: %T`, recommendAttribute)) + } + + versionAttribute, ok := attributes["version"] + + if !ok { + diags.AddError( + "Attribute Missing", + `version is missing from object`) + + return nil, diags + } + + versionVal, ok := versionAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`version expected to be basetypes.StringValue, was: %T`, versionAttribute)) + } + + if diags.HasError() { + return nil, diags + } + + return VersionsValue{ + Beta: betaVal, + Deprecated: deprecatedVal, + Recommend: recommendVal, + Version: versionVal, + state: attr.ValueStateKnown, + }, diags +} + +func NewVersionsValueNull() VersionsValue { + return VersionsValue{ + state: attr.ValueStateNull, + } +} + +func NewVersionsValueUnknown() VersionsValue { + return VersionsValue{ + state: attr.ValueStateUnknown, + } +} + +func NewVersionsValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (VersionsValue, diag.Diagnostics) { + var diags diag.Diagnostics + + // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521 + ctx := context.Background() + + for name, attributeType := range attributeTypes { + attribute, ok := attributes[name] + + if !ok { + diags.AddError( + "Missing VersionsValue Attribute Value", + "While creating a VersionsValue value, a missing attribute value was detected. "+ + "A VersionsValue must contain values for all attributes, even if null or unknown. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("VersionsValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()), + ) + + continue + } + + if !attributeType.Equal(attribute.Type(ctx)) { + diags.AddError( + "Invalid VersionsValue Attribute Type", + "While creating a VersionsValue value, an invalid attribute value was detected. "+ + "A VersionsValue must use a matching attribute type for the value. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("VersionsValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+ + fmt.Sprintf("VersionsValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)), + ) + } + } + + for name := range attributes { + _, ok := attributeTypes[name] + + if !ok { + diags.AddError( + "Extra VersionsValue Attribute Value", + "While creating a VersionsValue value, an extra attribute value was detected. "+ + "A VersionsValue must not contain values beyond the expected attribute types. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("Extra VersionsValue Attribute Name: %s", name), + ) + } + } + + if diags.HasError() { + return NewVersionsValueUnknown(), diags + } + + betaAttribute, ok := attributes["beta"] + + if !ok { + diags.AddError( + "Attribute Missing", + `beta is missing from object`) + + return NewVersionsValueUnknown(), diags + } + + betaVal, ok := betaAttribute.(basetypes.BoolValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`beta expected to be basetypes.BoolValue, was: %T`, betaAttribute)) + } + + deprecatedAttribute, ok := attributes["deprecated"] + + if !ok { + diags.AddError( + "Attribute Missing", + `deprecated is missing from object`) + + return NewVersionsValueUnknown(), diags + } + + deprecatedVal, ok := deprecatedAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`deprecated expected to be basetypes.StringValue, was: %T`, deprecatedAttribute)) + } + + recommendAttribute, ok := attributes["recommend"] + + if !ok { + diags.AddError( + "Attribute Missing", + `recommend is missing from object`) + + return NewVersionsValueUnknown(), diags + } + + recommendVal, ok := recommendAttribute.(basetypes.BoolValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`recommend expected to be basetypes.BoolValue, was: %T`, recommendAttribute)) + } + + versionAttribute, ok := attributes["version"] + + if !ok { + diags.AddError( + "Attribute Missing", + `version is missing from object`) + + return NewVersionsValueUnknown(), diags + } + + versionVal, ok := versionAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`version expected to be basetypes.StringValue, was: %T`, versionAttribute)) + } + + if diags.HasError() { + return NewVersionsValueUnknown(), diags + } + + return VersionsValue{ + Beta: betaVal, + Deprecated: deprecatedVal, + Recommend: recommendVal, + Version: versionVal, + state: attr.ValueStateKnown, + }, diags +} + +func NewVersionsValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) VersionsValue { + object, diags := NewVersionsValue(attributeTypes, attributes) + + if diags.HasError() { + // This could potentially be added to the diag package. + diagsStrings := make([]string, 0, len(diags)) + + for _, diagnostic := range diags { + diagsStrings = append(diagsStrings, fmt.Sprintf( + "%s | %s | %s", + diagnostic.Severity(), + diagnostic.Summary(), + diagnostic.Detail())) + } + + panic("NewVersionsValueMust received error(s): " + strings.Join(diagsStrings, "\n")) + } + + return object +} + +func (t VersionsType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) { + if in.Type() == nil { + return NewVersionsValueNull(), nil + } + + if !in.Type().Equal(t.TerraformType(ctx)) { + return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type()) + } + + if !in.IsKnown() { + return NewVersionsValueUnknown(), nil + } + + if in.IsNull() { + return NewVersionsValueNull(), nil + } + + attributes := map[string]attr.Value{} + + val := map[string]tftypes.Value{} + + err := in.As(&val) + + if err != nil { + return nil, err + } + + for k, v := range val { + a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v) + + if err != nil { + return nil, err + } + + attributes[k] = a + } + + return NewVersionsValueMust(VersionsValue{}.AttributeTypes(ctx), attributes), nil +} + +func (t VersionsType) ValueType(ctx context.Context) attr.Value { + return VersionsValue{} +} + +var _ basetypes.ObjectValuable = VersionsValue{} + +type VersionsValue struct { + Beta basetypes.BoolValue `tfsdk:"beta"` + Deprecated basetypes.StringValue `tfsdk:"deprecated"` + Recommend basetypes.BoolValue `tfsdk:"recommend"` + Version basetypes.StringValue `tfsdk:"version"` + state attr.ValueState +} + +func (v VersionsValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) { + attrTypes := make(map[string]tftypes.Type, 4) + + var val tftypes.Value + var err error + + attrTypes["beta"] = basetypes.BoolType{}.TerraformType(ctx) + attrTypes["deprecated"] = basetypes.StringType{}.TerraformType(ctx) + attrTypes["recommend"] = basetypes.BoolType{}.TerraformType(ctx) + attrTypes["version"] = basetypes.StringType{}.TerraformType(ctx) + + objectType := tftypes.Object{AttributeTypes: attrTypes} + + switch v.state { + case attr.ValueStateKnown: + vals := make(map[string]tftypes.Value, 4) + + val, err = v.Beta.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["beta"] = val + + val, err = v.Deprecated.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["deprecated"] = val + + val, err = v.Recommend.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["recommend"] = val + + val, err = v.Version.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["version"] = val + + if err := tftypes.ValidateValue(objectType, vals); err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + return tftypes.NewValue(objectType, vals), nil + case attr.ValueStateNull: + return tftypes.NewValue(objectType, nil), nil + case attr.ValueStateUnknown: + return tftypes.NewValue(objectType, tftypes.UnknownValue), nil + default: + panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state)) + } +} + +func (v VersionsValue) IsNull() bool { + return v.state == attr.ValueStateNull +} + +func (v VersionsValue) IsUnknown() bool { + return v.state == attr.ValueStateUnknown +} + +func (v VersionsValue) String() string { + return "VersionsValue" +} + +func (v VersionsValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) { + var diags diag.Diagnostics + + attributeTypes := map[string]attr.Type{ + "beta": basetypes.BoolType{}, + "deprecated": basetypes.StringType{}, + "recommend": basetypes.BoolType{}, + "version": basetypes.StringType{}, + } + + if v.IsNull() { + return types.ObjectNull(attributeTypes), diags + } + + if v.IsUnknown() { + return types.ObjectUnknown(attributeTypes), diags + } + + objVal, diags := types.ObjectValue( + attributeTypes, + map[string]attr.Value{ + "beta": v.Beta, + "deprecated": v.Deprecated, + "recommend": v.Recommend, + "version": v.Version, + }) + + return objVal, diags +} + +func (v VersionsValue) Equal(o attr.Value) bool { + other, ok := o.(VersionsValue) + + if !ok { + return false + } + + if v.state != other.state { + return false + } + + if v.state != attr.ValueStateKnown { + return true + } + + if !v.Beta.Equal(other.Beta) { + return false + } + + if !v.Deprecated.Equal(other.Deprecated) { + return false + } + + if !v.Recommend.Equal(other.Recommend) { + return false + } + + if !v.Version.Equal(other.Version) { + return false + } + + return true +} + +func (v VersionsValue) Type(ctx context.Context) attr.Type { + return VersionsType{ + basetypes.ObjectType{ + AttrTypes: v.AttributeTypes(ctx), + }, + } +} + +func (v VersionsValue) AttributeTypes(ctx context.Context) map[string]attr.Type { + return map[string]attr.Type{ + "beta": basetypes.BoolType{}, + "deprecated": basetypes.StringType{}, + "recommend": basetypes.BoolType{}, + "version": basetypes.StringType{}, + } +} diff --git a/stackit/internal/services/sqlserverflexbeta/database/planModifiers.yaml b/stackit/internal/services/sqlserverflexbeta/database/planModifiers.yaml index 1d010ed7..08d7e6cf 100644 --- a/stackit/internal/services/sqlserverflexbeta/database/planModifiers.yaml +++ b/stackit/internal/services/sqlserverflexbeta/database/planModifiers.yaml @@ -2,6 +2,7 @@ fields: - name: 'id' modifiers: - 'UseStateForUnknown' + - 'RequiresReplace' - name: 'instance_id' validators: @@ -32,13 +33,16 @@ fields: - name: 'owner' modifiers: - 'UseStateForUnknown' + - 'RequiresReplace' - name: 'database_name' modifiers: - 'UseStateForUnknown' + - 'RequiresReplace' - name: 'collation_name' modifiers: + - 'RequiresReplace' - 'UseStateForUnknown' - name: 'compatibility' @@ -49,3 +53,4 @@ fields: - name: 'compatibility_level' modifiers: - 'UseStateForUnknown' + - 'RequiresReplace' diff --git a/stackit/internal/services/sqlserverflexbeta/database/resource.go b/stackit/internal/services/sqlserverflexbeta/database/resource.go index 2f77191e..95424987 100644 --- a/stackit/internal/services/sqlserverflexbeta/database/resource.go +++ b/stackit/internal/services/sqlserverflexbeta/database/resource.go @@ -407,7 +407,7 @@ func (r *databaseResource) Read(ctx context.Context, req resource.ReadRequest, r func (r *databaseResource) Update(ctx context.Context, _ resource.UpdateRequest, resp *resource.UpdateResponse) { // TODO: Check update api endpoint - not available at the moment, so return an error for now - core.LogAndAddError(ctx, &resp.Diagnostics, "Error updating database", "Database can't be updated") + core.LogAndAddError(ctx, &resp.Diagnostics, "Error updating database", "there is no way to update a database") } func (r *databaseResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) { diff --git a/stackit/internal/services/sqlserverflexbeta/instance/resource.go b/stackit/internal/services/sqlserverflexbeta/instance/resource.go index 6cc07de1..a91040bf 100644 --- a/stackit/internal/services/sqlserverflexbeta/instance/resource.go +++ b/stackit/internal/services/sqlserverflexbeta/instance/resource.go @@ -208,11 +208,6 @@ func (r *instanceResource) Create(ctx context.Context, req resource.CreateReques return } - fmt.Printf("\n\n\n >>>>>> DEBUG <<<<< \n%+v\n\n\n", payload) - tflog.Debug(ctx, "\n\n\n >>>>>> DEBUG <<<<< \n\n\n", map[string]interface{}{ - "encryption": payload.Encryption, - }) - // Create new Instance createResp, err := r.client.CreateInstanceRequest( ctx, From 841e702b95dbc6717ad22ea3b41579cb2e71db4f Mon Sep 17 00:00:00 2001 From: "Marcel S. Henselin" Date: Tue, 17 Feb 2026 09:42:46 +0000 Subject: [PATCH 36/41] fix: encryption_fix (#75) ## Description relates to #1234 ## Checklist - [ ] Issue was linked above - [ ] Code format was applied: `make fmt` - [ ] Examples were added / adjusted (see `examples/` directory) - [x] Docs are up-to-date: `make generate-docs` (will be checked by CI) - [ ] Unit tests got implemented or updated - [ ] Acceptance tests got implemented or updated (see e.g. [here](https://github.com/stackitcloud/terraform-provider-stackit/blob/f5f99d170996b208672ae684b6da53420e369563/stackit/internal/services/dns/dns_acc_test.go)) - [x] Unit tests are passing: `make test` (will be checked by CI) - [x] No linter issues: `make lint` (will be checked by CI) Reviewed-on: https://tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pulls/75 Co-authored-by: Marcel S. Henselin Co-committed-by: Marcel S. Henselin --- .../resource.tf | 12 ++++- .../resource.tf | 2 +- .../resource.tf | 12 ++++- .../resource.tf | 24 +++++++++ .../resource.tf | 12 +++++ .../resource.tf | 12 +++++ .../postgresflexalpha/instance/resource.go | 3 +- .../sqlserverflexbeta/instance/functions.go | 50 ++++++++----------- 8 files changed, 95 insertions(+), 32 deletions(-) create mode 100644 examples/resources/stackitprivatepreview_sqlserverflexalpha_database/resource.tf create mode 100644 examples/resources/stackitprivatepreview_sqlserverflexbeta_database/resource.tf create mode 100644 examples/resources/stackitprivatepreview_sqlserverflexbeta_user/resource.tf diff --git a/examples/resources/stackitprivatepreview_postgresflexalpha_database/resource.tf b/examples/resources/stackitprivatepreview_postgresflexalpha_database/resource.tf index a013b9c6..ad0c051e 100644 --- a/examples/resources/stackitprivatepreview_postgresflexalpha_database/resource.tf +++ b/examples/resources/stackitprivatepreview_postgresflexalpha_database/resource.tf @@ -9,4 +9,14 @@ resource "stackitprivatepreview_postgresflexalpha_database" "example" { import { to = stackitprivatepreview_postgresflexalpha_database.import-example id = "${var.project_id},${var.region},${var.postgres_instance_id},${var.postgres_database_id}" -} \ No newline at end of file +} + +import { + to = stackitprivatepreview_postgresflexalpha_database.import-example + identity = { + project_id = "project_id" + region = "region" + instance_id = "instance_id" + database_id = "database_id" + } +} diff --git a/examples/resources/stackitprivatepreview_postgresflexalpha_instance/resource.tf b/examples/resources/stackitprivatepreview_postgresflexalpha_instance/resource.tf index 1f98284a..711a9f60 100644 --- a/examples/resources/stackitprivatepreview_postgresflexalpha_instance/resource.tf +++ b/examples/resources/stackitprivatepreview_postgresflexalpha_instance/resource.tf @@ -1,4 +1,4 @@ -resource "stackitprivatepreview_postgresflexalpha_instance" "msh-instance-only" { +resource "stackitprivatepreview_postgresflexalpha_instance" "example-instance" { project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" name = "example-instance" acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"] diff --git a/examples/resources/stackitprivatepreview_postgresflexalpha_user/resource.tf b/examples/resources/stackitprivatepreview_postgresflexalpha_user/resource.tf index 01469992..756e854d 100644 --- a/examples/resources/stackitprivatepreview_postgresflexalpha_user/resource.tf +++ b/examples/resources/stackitprivatepreview_postgresflexalpha_user/resource.tf @@ -9,4 +9,14 @@ resource "stackitprivatepreview_postgresflexalpha_user" "example" { import { to = stackitprivatepreview_postgresflexalpha_user.import-example id = "${var.project_id},${var.region},${var.postgres_instance_id},${var.user_id}" -} \ No newline at end of file +} + +import { + to = stackitprivatepreview_postgresflexalpha_user.import-example + identity = { + project_id = "project.id" + region = "region" + instance_id = "instance.id" + user_id = "user.id" + } +} diff --git a/examples/resources/stackitprivatepreview_sqlserverflexalpha_database/resource.tf b/examples/resources/stackitprivatepreview_sqlserverflexalpha_database/resource.tf new file mode 100644 index 00000000..b85cc22b --- /dev/null +++ b/examples/resources/stackitprivatepreview_sqlserverflexalpha_database/resource.tf @@ -0,0 +1,24 @@ +resource "stackitprivatepreview_sqlserverflexalpha_database" "example" { + project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" + instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" + collation = "" + compatibility = "160" + name = "" + owner = "" +} + +# Only use the import statement, if you want to import a existing sqlserverflex database +import { + to = stackitprivatepreview_sqlserverflexalpha_database.import-example + id = "${var.project_id},${var.region},${var.sql_instance_id},${var.sql_user_id}" +} + +import { + to = stackitprivatepreview_sqlserverflexalpha_database.import-example + identity = { + project_id = "project.id" + region = "region" + instance_id = "instance.id" + database_id = "database.id" + } +} \ No newline at end of file diff --git a/examples/resources/stackitprivatepreview_sqlserverflexbeta_database/resource.tf b/examples/resources/stackitprivatepreview_sqlserverflexbeta_database/resource.tf new file mode 100644 index 00000000..83c52561 --- /dev/null +++ b/examples/resources/stackitprivatepreview_sqlserverflexbeta_database/resource.tf @@ -0,0 +1,12 @@ +resource "stackitprivatepreview_sqlserverflexalpha_user" "example" { + project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" + instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" + username = "username" + roles = ["role"] +} + +# Only use the import statement, if you want to import an existing sqlserverflex user +import { + to = stackitprivatepreview_sqlserverflexalpha_user.import-example + id = "${var.project_id},${var.region},${var.sql_instance_id},${var.sql_user_id}" +} \ No newline at end of file diff --git a/examples/resources/stackitprivatepreview_sqlserverflexbeta_user/resource.tf b/examples/resources/stackitprivatepreview_sqlserverflexbeta_user/resource.tf new file mode 100644 index 00000000..83c52561 --- /dev/null +++ b/examples/resources/stackitprivatepreview_sqlserverflexbeta_user/resource.tf @@ -0,0 +1,12 @@ +resource "stackitprivatepreview_sqlserverflexalpha_user" "example" { + project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" + instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" + username = "username" + roles = ["role"] +} + +# Only use the import statement, if you want to import an existing sqlserverflex user +import { + to = stackitprivatepreview_sqlserverflexalpha_user.import-example + id = "${var.project_id},${var.region},${var.sql_instance_id},${var.sql_user_id}" +} \ No newline at end of file diff --git a/stackit/internal/services/postgresflexalpha/instance/resource.go b/stackit/internal/services/postgresflexalpha/instance/resource.go index f6402220..74661cd9 100644 --- a/stackit/internal/services/postgresflexalpha/instance/resource.go +++ b/stackit/internal/services/postgresflexalpha/instance/resource.go @@ -243,7 +243,8 @@ func (r *instanceResource) Create( return } - waitResp, err := wait.CreateInstanceWaitHandler(ctx, r.client, projectId, region, instanceId).WaitWithContext(ctx) + waitResp, err := wait.CreateInstanceWaitHandler(ctx, r.client, projectId, region, instanceId). + WaitWithContext(ctx) if err != nil { core.LogAndAddError( ctx, diff --git a/stackit/internal/services/sqlserverflexbeta/instance/functions.go b/stackit/internal/services/sqlserverflexbeta/instance/functions.go index a82eef23..cd18314a 100644 --- a/stackit/internal/services/sqlserverflexbeta/instance/functions.go +++ b/stackit/internal/services/sqlserverflexbeta/instance/functions.go @@ -25,7 +25,7 @@ func mapResponseToModel( ) error { m.BackupSchedule = types.StringValue(resp.GetBackupSchedule()) m.Edition = types.StringValue(string(resp.GetEdition())) - m.Encryption = handleEncryption(m, resp) + m.Encryption = handleEncryption(ctx, m, resp) m.FlavorId = types.StringValue(resp.GetFlavorId()) m.Id = types.StringValue(resp.GetId()) m.InstanceId = types.StringValue(resp.GetId()) @@ -81,7 +81,7 @@ func mapDataResponseToModel( ) error { m.BackupSchedule = types.StringValue(resp.GetBackupSchedule()) m.Edition = types.StringValue(string(resp.GetEdition())) - m.Encryption = handleDSEncryption(m, resp) + m.Encryption = handleDSEncryption(ctx, m, resp) m.FlavorId = types.StringValue(resp.GetFlavorId()) m.Id = types.StringValue(resp.GetId()) m.InstanceId = types.StringValue(resp.GetId()) @@ -130,6 +130,7 @@ func mapDataResponseToModel( } func handleEncryption( + ctx context.Context, m *sqlserverflexbetaResGen.InstanceModel, resp *sqlserverflexbeta.GetInstanceResponse, ) sqlserverflexbetaResGen.EncryptionValue { @@ -145,23 +146,20 @@ func handleEncryption( return m.Encryption } - enc := sqlserverflexbetaResGen.NewEncryptionValueNull() - if kVal, ok := resp.Encryption.GetKekKeyIdOk(); ok { - enc.KekKeyId = types.StringValue(kVal) - } - if kkVal, ok := resp.Encryption.GetKekKeyRingIdOk(); ok { - enc.KekKeyRingId = types.StringValue(kkVal) - } - if kkvVal, ok := resp.Encryption.GetKekKeyVersionOk(); ok { - enc.KekKeyVersion = types.StringValue(kkvVal) - } - if sa, ok := resp.Encryption.GetServiceAccountOk(); ok { - enc.ServiceAccount = types.StringValue(sa) - } + enc := sqlserverflexbetaResGen.NewEncryptionValueMust( + sqlserverflexbetaResGen.EncryptionValue{}.AttributeTypes(ctx), + map[string]attr.Value{ + "kek_key_id": types.StringValue(resp.Encryption.GetKekKeyId()), + "kek_key_ring_id": types.StringValue(resp.Encryption.GetKekKeyRingId()), + "kek_key_version": types.StringValue(resp.Encryption.GetKekKeyVersion()), + "service_account": types.StringValue(resp.Encryption.GetServiceAccount()), + }, + ) return enc } func handleDSEncryption( + ctx context.Context, m *dataSourceModel, resp *sqlserverflexbeta.GetInstanceResponse, ) sqlserverflexbetaDataGen.EncryptionValue { @@ -177,19 +175,15 @@ func handleDSEncryption( return m.Encryption } - enc := sqlserverflexbetaDataGen.NewEncryptionValueNull() - if kVal, ok := resp.Encryption.GetKekKeyIdOk(); ok { - enc.KekKeyId = types.StringValue(kVal) - } - if kkVal, ok := resp.Encryption.GetKekKeyRingIdOk(); ok { - enc.KekKeyRingId = types.StringValue(kkVal) - } - if kkvVal, ok := resp.Encryption.GetKekKeyVersionOk(); ok { - enc.KekKeyVersion = types.StringValue(kkvVal) - } - if sa, ok := resp.Encryption.GetServiceAccountOk(); ok { - enc.ServiceAccount = types.StringValue(sa) - } + enc := sqlserverflexbetaDataGen.NewEncryptionValueMust( + sqlserverflexbetaDataGen.EncryptionValue{}.AttributeTypes(ctx), + map[string]attr.Value{ + "kek_key_id": types.StringValue(resp.Encryption.GetKekKeyId()), + "kek_key_ring_id": types.StringValue(resp.Encryption.GetKekKeyRingId()), + "kek_key_version": types.StringValue(resp.Encryption.GetKekKeyVersion()), + "service_account": types.StringValue(resp.Encryption.GetServiceAccount()), + }, + ) return enc } From 36eccc52c330ebd0996b80df4ce2d0160ee1420b Mon Sep 17 00:00:00 2001 From: "Marcel S. Henselin" Date: Tue, 17 Feb 2026 17:18:40 +0000 Subject: [PATCH 37/41] fix: null_ident (#76) ## Description relates to #1234 ## Checklist - [ ] Issue was linked above - [ ] Code format was applied: `make fmt` - [ ] Examples were added / adjusted (see `examples/` directory) - [x] Docs are up-to-date: `make generate-docs` (will be checked by CI) - [ ] Unit tests got implemented or updated - [ ] Acceptance tests got implemented or updated (see e.g. [here](https://github.com/stackitcloud/terraform-provider-stackit/blob/f5f99d170996b208672ae684b6da53420e369563/stackit/internal/services/dns/dns_acc_test.go)) - [x] Unit tests are passing: `make test` (will be checked by CI) - [x] No linter issues: `make lint` (will be checked by CI) Reviewed-on: https://tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pulls/76 Co-authored-by: Marcel S. Henselin Co-committed-by: Marcel S. Henselin --- docs/resources/postgresflexalpha_database.md | 10 + docs/resources/postgresflexalpha_instance.md | 2 +- docs/resources/postgresflexalpha_user.md | 10 + docs/resources/sqlserverflexalpha_database.md | 27 +++ docs/resources/sqlserverflexbeta_database.md | 15 ++ docs/resources/sqlserverflexbeta_user.md | 15 ++ go.mod | 5 +- go.sum | 57 ++--- internal/testutils/activateMocks.go | 32 +++ sample/sqlserver/flavor.tf | 4 +- sample/sqlserver/sqlserver.tf | 101 ++------- .../postgresflexalpha/database/resource.go | 20 +- .../sqlserverflexalpha/database/resource.go | 7 - .../sqlserverflexbeta/database/resource.go | 31 +-- stackit/provider_acc_test.go | 197 +++++++++++++++--- 15 files changed, 329 insertions(+), 204 deletions(-) create mode 100644 internal/testutils/activateMocks.go diff --git a/docs/resources/postgresflexalpha_database.md b/docs/resources/postgresflexalpha_database.md index 7834287e..6c94fd62 100644 --- a/docs/resources/postgresflexalpha_database.md +++ b/docs/resources/postgresflexalpha_database.md @@ -25,6 +25,16 @@ import { to = stackitprivatepreview_postgresflexalpha_database.import-example id = "${var.project_id},${var.region},${var.postgres_instance_id},${var.postgres_database_id}" } + +import { + to = stackitprivatepreview_postgresflexalpha_database.import-example + identity = { + project_id = "project_id" + region = "region" + instance_id = "instance_id" + database_id = "database_id" + } +} ``` diff --git a/docs/resources/postgresflexalpha_instance.md b/docs/resources/postgresflexalpha_instance.md index fb735ecc..3f682bb5 100644 --- a/docs/resources/postgresflexalpha_instance.md +++ b/docs/resources/postgresflexalpha_instance.md @@ -13,7 +13,7 @@ description: |- ## Example Usage ```terraform -resource "stackitprivatepreview_postgresflexalpha_instance" "msh-instance-only" { +resource "stackitprivatepreview_postgresflexalpha_instance" "example-instance" { project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" name = "example-instance" acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"] diff --git a/docs/resources/postgresflexalpha_user.md b/docs/resources/postgresflexalpha_user.md index d0b9c500..168d17ff 100644 --- a/docs/resources/postgresflexalpha_user.md +++ b/docs/resources/postgresflexalpha_user.md @@ -25,6 +25,16 @@ import { to = stackitprivatepreview_postgresflexalpha_user.import-example id = "${var.project_id},${var.region},${var.postgres_instance_id},${var.user_id}" } + +import { + to = stackitprivatepreview_postgresflexalpha_user.import-example + identity = { + project_id = "project.id" + region = "region" + instance_id = "instance.id" + user_id = "user.id" + } +} ``` diff --git a/docs/resources/sqlserverflexalpha_database.md b/docs/resources/sqlserverflexalpha_database.md index fd6ba0fd..7d8f050b 100644 --- a/docs/resources/sqlserverflexalpha_database.md +++ b/docs/resources/sqlserverflexalpha_database.md @@ -10,7 +10,34 @@ description: |- +## Example Usage +```terraform +resource "stackitprivatepreview_sqlserverflexalpha_database" "example" { + project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" + instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" + collation = "" + compatibility = "160" + name = "" + owner = "" +} + +# Only use the import statement, if you want to import a existing sqlserverflex database +import { + to = stackitprivatepreview_sqlserverflexalpha_database.import-example + id = "${var.project_id},${var.region},${var.sql_instance_id},${var.sql_user_id}" +} + +import { + to = stackitprivatepreview_sqlserverflexalpha_database.import-example + identity = { + project_id = "project.id" + region = "region" + instance_id = "instance.id" + database_id = "database.id" + } +} +``` ## Schema diff --git a/docs/resources/sqlserverflexbeta_database.md b/docs/resources/sqlserverflexbeta_database.md index 893433fe..fabaaccb 100644 --- a/docs/resources/sqlserverflexbeta_database.md +++ b/docs/resources/sqlserverflexbeta_database.md @@ -10,7 +10,22 @@ description: |- +## Example Usage +```terraform +resource "stackitprivatepreview_sqlserverflexalpha_user" "example" { + project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" + instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" + username = "username" + roles = ["role"] +} + +# Only use the import statement, if you want to import an existing sqlserverflex user +import { + to = stackitprivatepreview_sqlserverflexalpha_user.import-example + id = "${var.project_id},${var.region},${var.sql_instance_id},${var.sql_user_id}" +} +``` ## Schema diff --git a/docs/resources/sqlserverflexbeta_user.md b/docs/resources/sqlserverflexbeta_user.md index ea1577a8..81d6da28 100644 --- a/docs/resources/sqlserverflexbeta_user.md +++ b/docs/resources/sqlserverflexbeta_user.md @@ -10,7 +10,22 @@ description: |- +## Example Usage +```terraform +resource "stackitprivatepreview_sqlserverflexalpha_user" "example" { + project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" + instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" + username = "username" + roles = ["role"] +} + +# Only use the import statement, if you want to import an existing sqlserverflex user +import { + to = stackitprivatepreview_sqlserverflexalpha_user.import-example + id = "${var.project_id},${var.region},${var.sql_instance_id},${var.sql_user_id}" +} +``` ## Schema diff --git a/go.mod b/go.mod index 9f74c926..749c34d5 100644 --- a/go.mod +++ b/go.mod @@ -13,11 +13,13 @@ require ( github.com/hashicorp/terraform-plugin-testing v1.14.0 github.com/iancoleman/strcase v0.3.0 github.com/ivanpirog/coloredcobra v1.0.1 + github.com/jarcoal/httpmock v1.4.1 github.com/joho/godotenv v1.5.1 github.com/ldez/go-git-cmd-wrapper/v2 v2.9.1 github.com/spf13/cobra v1.10.2 github.com/stackitcloud/stackit-sdk-go/core v0.21.1 github.com/stackitcloud/stackit-sdk-go/services/iaasalpha v0.1.23-alpha + github.com/stretchr/testify v1.11.1 github.com/teambition/rrule-go v1.8.2 gopkg.in/yaml.v3 v3.0.1 ) @@ -33,6 +35,7 @@ require ( github.com/agext/levenshtein v1.2.3 // indirect github.com/apparentlymart/go-textseg/v15 v15.0.0 // indirect github.com/cloudflare/circl v1.6.3 // indirect + github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect github.com/fatih/color v1.18.0 // indirect github.com/golang-jwt/jwt/v5 v5.3.1 // indirect github.com/golang/protobuf v1.5.4 // indirect @@ -64,8 +67,8 @@ require ( github.com/mitchellh/mapstructure v1.5.0 // indirect github.com/mitchellh/reflectwalk v1.0.2 // indirect github.com/oklog/run v1.2.0 // indirect + github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect github.com/spf13/pflag v1.0.10 // indirect - github.com/stretchr/testify v1.11.1 // indirect github.com/vmihailenco/msgpack v4.0.4+incompatible // indirect github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect diff --git a/go.sum b/go.sum index ba4fe023..6553d35b 100644 --- a/go.sum +++ b/go.sum @@ -13,8 +13,8 @@ github.com/apparentlymart/go-textseg/v15 v15.0.0 h1:uYvfpb3DyLSCGWnctWKGj857c6ew github.com/apparentlymart/go-textseg/v15 v15.0.0/go.mod h1:K8XmNZdhEBkdlyDdvbmmsvpAG721bKi0joRfFdHIWJ4= github.com/bufbuild/protocompile v0.14.1 h1:iA73zAf/fyljNjQKwYzUHD6AD4R8KMasmwa/FBatYVw= github.com/bufbuild/protocompile v0.14.1/go.mod h1:ppVdAIhbr2H8asPk6k4pY7t9zB1OU5DoEw9xY/FUi1c= -github.com/cloudflare/circl v1.6.2 h1:hL7VBpHHKzrV5WTfHCaBsgx/HGbBYlgrwvNXEVDYYsQ= -github.com/cloudflare/circl v1.6.2/go.mod h1:2eXP6Qfat4O/Yhh8BznvKnJ+uzEoTQ6jVKJRn81BiS4= +github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs= +github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= github.com/cloudflare/circl v1.6.3 h1:9GPOhQGF9MCYUeXyMYlqTR6a5gTrgR/fBLXvUgtVcg8= github.com/cloudflare/circl v1.6.3/go.mod h1:2eXP6Qfat4O/Yhh8BznvKnJ+uzEoTQ6jVKJRn81BiS4= github.com/cpuguy83/go-md2man/v2 v2.0.1/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= @@ -35,17 +35,14 @@ github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 h1:+zs/tPmkDkHx3U66D github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376/go.mod h1:an3vInlBmSxCcxctByoQdvwPiA7DTK7jaaFDBTtu0ic= github.com/go-git/go-billy/v5 v5.6.2 h1:6Q86EsPXMa7c3YZ3aLAQsMA0VlWmy43r6FHqa/UNbRM= github.com/go-git/go-billy/v5 v5.6.2/go.mod h1:rcFC2rAsp/erv7CMz9GczHcuD0D32fWzH+MJAU+jaUU= -github.com/go-git/go-git/v5 v5.14.0 h1:/MD3lCrGjCen5WfEAzKg00MJJffKhC8gzS80ycmCi60= -github.com/go-git/go-git/v5 v5.14.0/go.mod h1:Z5Xhoia5PcWA3NF8vRLURn9E5FRhSl7dGj9ItW3Wk5k= github.com/go-git/go-git/v5 v5.16.5 h1:mdkuqblwr57kVfXri5TTH+nMFLNUxIj9Z7F5ykFbw5s= +github.com/go-git/go-git/v5 v5.16.5/go.mod h1:QOMLpNf1qxuSY4StA/ArOdfFR2TrKEjJiye2kel2m+M= github.com/go-logr/logr v1.4.3 h1:CjnDlHq8ikf6E492q6eKboGOC0T8CDaOvkHCIg8idEI= github.com/go-logr/logr v1.4.3/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= github.com/go-test/deep v1.0.3 h1:ZrJSEWsXzPOxaZnFteGEfooLba+ju3FYIbOrS+rQd68= github.com/go-test/deep v1.0.3/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA= -github.com/golang-jwt/jwt/v5 v5.3.0 h1:pv4AsKCKKZuqlgs5sUmn4x8UlGa0kEVt/puTpKx9vvo= -github.com/golang-jwt/jwt/v5 v5.3.0/go.mod h1:fxCRLWMO43lRc8nhHWY6LGqRcf+1gQWArsqaEUEa5bE= github.com/golang-jwt/jwt/v5 v5.3.1 h1:kYf81DTWFe7t+1VvL7eS+jKFVWaUnK9cB1qbwn63YCY= github.com/golang-jwt/jwt/v5 v5.3.1/go.mod h1:fxCRLWMO43lRc8nhHWY6LGqRcf+1gQWArsqaEUEa5bE= github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 h1:f+oWsMOmNPc8JmEHVZIycC7hBoQxHH9pNKQORJNozsQ= @@ -84,16 +81,12 @@ github.com/hashicorp/go-uuid v1.0.3 h1:2gKiV6YVmrJ1i2CKKa9obLvRieoRGviZFL26PcT/C github.com/hashicorp/go-uuid v1.0.3/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= github.com/hashicorp/go-version v1.8.0 h1:KAkNb1HAiZd1ukkxDFGmokVZe1Xy9HG6NUp+bPle2i4= github.com/hashicorp/go-version v1.8.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= -github.com/hashicorp/hc-install v0.9.2 h1:v80EtNX4fCVHqzL9Lg/2xkp62bbvQMnvPQ0G+OmtO24= -github.com/hashicorp/hc-install v0.9.2/go.mod h1:XUqBQNnuT4RsxoxiM9ZaUk0NX8hi2h+Lb6/c0OZnC/I= github.com/hashicorp/hc-install v0.9.3 h1:1H4dgmgzxEVwT6E/d/vIL5ORGVKz9twRwDw+qA5Hyho= github.com/hashicorp/hc-install v0.9.3/go.mod h1:FQlQ5I3I/X409N/J1U4pPeQQz1R3BoV0IysB7aiaQE0= github.com/hashicorp/hcl/v2 v2.24.0 h1:2QJdZ454DSsYGoaE6QheQZjtKZSUs9Nh2izTWiwQxvE= github.com/hashicorp/hcl/v2 v2.24.0/go.mod h1:oGoO1FIQYfn/AgyOhlg9qLC6/nOJPX3qGbkZpYAcqfM= github.com/hashicorp/logutils v1.0.0 h1:dLEQVugN8vlakKOUE3ihGLTZJRB4j+M2cdTm/ORI65Y= github.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO+LraFDTW64= -github.com/hashicorp/terraform-exec v0.24.0 h1:mL0xlk9H5g2bn0pPF6JQZk5YlByqSqrO5VoaNtAf8OE= -github.com/hashicorp/terraform-exec v0.24.0/go.mod h1:lluc/rDYfAhYdslLJQg3J0oDqo88oGQAdHR+wDqFvo4= github.com/hashicorp/terraform-exec v0.25.0 h1:Bkt6m3VkJqYh+laFMrWIpy9KHYFITpOyzRMNI35rNaY= github.com/hashicorp/terraform-exec v0.25.0/go.mod h1:dl9IwsCfklDU6I4wq9/StFDp7dNbH/h5AnfS1RmiUl8= github.com/hashicorp/terraform-json v0.27.2 h1:BwGuzM6iUPqf9JYM/Z4AF1OJ5VVJEEzoKST/tRDBJKU= @@ -106,8 +99,6 @@ github.com/hashicorp/terraform-plugin-go v0.29.0 h1:1nXKl/nSpaYIUBU1IG/EsDOX0vv+ github.com/hashicorp/terraform-plugin-go v0.29.0/go.mod h1:vYZbIyvxyy0FWSmDHChCqKvI40cFTDGSb3D8D70i9GM= github.com/hashicorp/terraform-plugin-log v0.10.0 h1:eu2kW6/QBVdN4P3Ju2WiB2W3ObjkAsyfBsL3Wh1fj3g= github.com/hashicorp/terraform-plugin-log v0.10.0/go.mod h1:/9RR5Cv2aAbrqcTSdNmY1NRHP4E3ekrXRGjqORpXyB0= -github.com/hashicorp/terraform-plugin-sdk/v2 v2.38.1 h1:mlAq/OrMlg04IuJT7NpefI1wwtdpWudnEmjuQs04t/4= -github.com/hashicorp/terraform-plugin-sdk/v2 v2.38.1/go.mod h1:GQhpKVvvuwzD79e8/NZ+xzj+ZpWovdPAe8nfV/skwNU= github.com/hashicorp/terraform-plugin-sdk/v2 v2.38.2 h1:sy0Bc4A/GZNdmwpVX/Its9aIweCfY9fRfY1IgmXkOj8= github.com/hashicorp/terraform-plugin-sdk/v2 v2.38.2/go.mod h1:MQisArXYCowb/5q4lDS/BWp5KnXiZ4lxOIyrpKBpUBE= github.com/hashicorp/terraform-plugin-testing v1.14.0 h1:5t4VKrjOJ0rg0sVuSJ86dz5K7PHsMO6OKrHFzDBerWA= @@ -125,6 +116,8 @@ github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2 github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= github.com/ivanpirog/coloredcobra v1.0.1 h1:aURSdEmlR90/tSiWS0dMjdwOvCVUeYLfltLfbgNxrN4= github.com/ivanpirog/coloredcobra v1.0.1/go.mod h1:iho4nEKcnwZFiniGSdcgdvRgZNjxm+h20acv8vqmN6Q= +github.com/jarcoal/httpmock v1.4.1 h1:0Ju+VCFuARfFlhVXFc2HxlcQkfB+Xq12/EotHko+x2A= +github.com/jarcoal/httpmock v1.4.1/go.mod h1:ftW1xULwo+j0R0JJkJIIi7UKigZUXCLLanykgjwBXL0= github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 h1:BQSFePA1RWJOlocH6Fxy8MmwDt+yVQYULKfN0RoTN8A= github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99/go.mod h1:1lJo3i6rXxKeerYnT8Nvf0QmHCRC1n8sfWVwXF2Frvo= github.com/jhump/protoreflect v1.17.0 h1:qOEr613fac2lOuTgWN4tPAtLL7fUSbuJL5X5XumQh94= @@ -150,6 +143,8 @@ github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Ky github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/maxatome/go-testdeep v1.14.0 h1:rRlLv1+kI8eOI3OaBXZwb3O7xY3exRzdW5QyX48g9wI= +github.com/maxatome/go-testdeep v1.14.0/go.mod h1:lPZc/HAcJMP92l7yI6TRz1aZN5URwUBUAfUNvrclaNM= github.com/mitchellh/copystructure v1.2.0 h1:vpKXTN4ewci03Vljg/q9QvCGUDttBOGBIa15WveJJGw= github.com/mitchellh/copystructure v1.2.0/go.mod h1:qLl+cE2AmVv+CoeAwDPye/v+N2HKCj9FbZEVFJRxO9s= github.com/mitchellh/go-testing-interface v1.14.1 h1:jrgshOhYAUVNMAJiKbEu7EqAwgJJ2JqpQmpLJOu07cU= @@ -181,8 +176,6 @@ github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An github.com/spf13/pflag v1.0.9/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= github.com/spf13/pflag v1.0.10 h1:4EBh2KAYBwaONj6b2Ye1GiHfwjqyROoF4RwYO+vPwFk= github.com/spf13/pflag v1.0.10/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= -github.com/stackitcloud/stackit-sdk-go/core v0.21.0 h1:QXZqiaO7U/4IpTkJfzt4dt6QxJzG2uUS12mBnHpYNik= -github.com/stackitcloud/stackit-sdk-go/core v0.21.0/go.mod h1:fqto7M82ynGhEnpZU6VkQKYWYoFG5goC076JWXTUPRQ= github.com/stackitcloud/stackit-sdk-go/core v0.21.1 h1:Y/PcAgM7DPYMNqum0MLv4n1mF9ieuevzcCIZYQfm3Ts= github.com/stackitcloud/stackit-sdk-go/core v0.21.1/go.mod h1:osMglDby4csGZ5sIfhNyYq1bS1TxIdPY88+skE/kkmI= github.com/stackitcloud/stackit-sdk-go/services/iaasalpha v0.1.23-alpha h1:ugpMOMUZGB0yXsWcfe97F7GCdjlexbjFuGD8ZeyMSts= @@ -209,39 +202,28 @@ github.com/zclconf/go-cty-debug v0.0.0-20240509010212-0d6042c53940 h1:4r45xpDWB6 github.com/zclconf/go-cty-debug v0.0.0-20240509010212-0d6042c53940/go.mod h1:CmBdvvj3nqzfzJ6nTCIwDTPZ56aVGvDrmztiO5g3qrM= go.opentelemetry.io/auto/sdk v1.2.1 h1:jXsnJ4Lmnqd11kwkBV2LgLoFMZKizbCi5fNZ/ipaZ64= go.opentelemetry.io/auto/sdk v1.2.1/go.mod h1:KRTj+aOaElaLi+wW1kO/DZRXwkF4C5xPbEe3ZiIhN7Y= -go.opentelemetry.io/otel v1.38.0 h1:RkfdswUDRimDg0m2Az18RKOsnI8UDzppJAtj01/Ymk8= -go.opentelemetry.io/otel v1.38.0/go.mod h1:zcmtmQ1+YmQM9wrNsTGV/q/uyusom3P8RxwExxkZhjM= go.opentelemetry.io/otel v1.39.0 h1:8yPrr/S0ND9QEfTfdP9V+SiwT4E0G7Y5MO7p85nis48= -go.opentelemetry.io/otel/metric v1.38.0 h1:Kl6lzIYGAh5M159u9NgiRkmoMKjvbsKtYRwgfrA6WpA= -go.opentelemetry.io/otel/metric v1.38.0/go.mod h1:kB5n/QoRM8YwmUahxvI3bO34eVtQf2i4utNVLr9gEmI= +go.opentelemetry.io/otel v1.39.0/go.mod h1:kLlFTywNWrFyEdH0oj2xK0bFYZtHRYUdv1NklR/tgc8= go.opentelemetry.io/otel/metric v1.39.0 h1:d1UzonvEZriVfpNKEVmHXbdf909uGTOQjA0HF0Ls5Q0= -go.opentelemetry.io/otel/sdk v1.38.0 h1:l48sr5YbNf2hpCUj/FoGhW9yDkl+Ma+LrVl8qaM5b+E= -go.opentelemetry.io/otel/sdk v1.38.0/go.mod h1:ghmNdGlVemJI3+ZB5iDEuk4bWA3GkTpW+DOoZMYBVVg= +go.opentelemetry.io/otel/metric v1.39.0/go.mod h1:jrZSWL33sD7bBxg1xjrqyDjnuzTUB0x1nBERXd7Ftcs= go.opentelemetry.io/otel/sdk v1.39.0 h1:nMLYcjVsvdui1B/4FRkwjzoRVsMK8uL/cj0OyhKzt18= -go.opentelemetry.io/otel/sdk/metric v1.38.0 h1:aSH66iL0aZqo//xXzQLYozmWrXxyFkBJ6qT5wthqPoM= -go.opentelemetry.io/otel/sdk/metric v1.38.0/go.mod h1:dg9PBnW9XdQ1Hd6ZnRz689CbtrUp0wMMs9iPcgT9EZA= +go.opentelemetry.io/otel/sdk v1.39.0/go.mod h1:vDojkC4/jsTJsE+kh+LXYQlbL8CgrEcwmt1ENZszdJE= go.opentelemetry.io/otel/sdk/metric v1.39.0 h1:cXMVVFVgsIf2YL6QkRF4Urbr/aMInf+2WKg+sEJTtB8= -go.opentelemetry.io/otel/trace v1.38.0 h1:Fxk5bKrDZJUH+AMyyIXGcFAPah0oRcT+LuNtJrmcNLE= -go.opentelemetry.io/otel/trace v1.38.0/go.mod h1:j1P9ivuFsTceSWe1oY+EeW3sc+Pp42sO++GHkg4wwhs= +go.opentelemetry.io/otel/sdk/metric v1.39.0/go.mod h1:xq9HEVH7qeX69/JnwEfp6fVq5wosJsY1mt4lLfYdVew= go.opentelemetry.io/otel/trace v1.39.0 h1:2d2vfpEDmCJ5zVYz7ijaJdOF59xLomrvj7bjt6/qCJI= +go.opentelemetry.io/otel/trace v1.39.0/go.mod h1:88w4/PnZSazkGzz/w84VHpQafiU4EtqqlVdxWy+rNOA= go.yaml.in/yaml/v3 v3.0.4/go.mod h1:DhzuOOF2ATzADvBadXxruRBLzYTpT36CKvDb3+aBEFg= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/crypto v0.47.0 h1:V6e3FRj+n4dbpw86FJ8Fv7XVOql7TEwpHapKoMJ/GO8= -golang.org/x/crypto v0.47.0/go.mod h1:ff3Y9VzzKbwSSEzWqJsJVBnWmRwRSHt/6Op5n9bQc4A= golang.org/x/crypto v0.48.0 h1:/VRzVqiRSggnhY7gNRxPauEQ5Drw9haKdM0jqfcCFts= golang.org/x/crypto v0.48.0/go.mod h1:r0kV5h3qnFPlQnBSrULhlsRfryS2pmewsg+XfMgkVos= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= -golang.org/x/mod v0.32.0 h1:9F4d3PHLljb6x//jOyokMv3eX+YDeepZSEo3mFJy93c= -golang.org/x/mod v0.32.0/go.mod h1:SgipZ/3h2Ci89DlEtEXWUk/HteuRin+HHhN+WbNhguU= golang.org/x/mod v0.33.0 h1:tHFzIWbBifEmbwtGz65eaWyGiGZatSrT9prnU8DbVL8= golang.org/x/mod v0.33.0/go.mod h1:swjeQEj+6r7fODbD2cqrnje9PnziFuw4bmLbBZFrQ5w= golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= -golang.org/x/net v0.49.0 h1:eeHFmOGUTtaaPSGNmjBKpbng9MulQsJURQUAfUwY++o= -golang.org/x/net v0.49.0/go.mod h1:/ysNB2EvaqvesRkuLAyjI1ycPZlQHM3q01F02UY/MV8= golang.org/x/net v0.50.0 h1:ucWh9eiCGyDR3vtzso0WMQinm2Dnt8cFMuQa9K33J60= golang.org/x/net v0.50.0/go.mod h1:UgoSli3F/pBgdJBHCTc+tp3gmrU4XswgGRgtnwWTfyM= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -260,32 +242,23 @@ golang.org/x/sys v0.0.0-20220503163025-988cb79eb6c6/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.40.0 h1:DBZZqJ2Rkml6QMQsZywtnjnnGvHza6BTfYFWY9kjEWQ= -golang.org/x/sys v0.40.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= golang.org/x/sys v0.41.0 h1:Ivj+2Cp/ylzLiEU89QhWblYnOE9zerudt9Ftecq2C6k= golang.org/x/sys v0.41.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= -golang.org/x/telemetry v0.0.0-20260116145544-c6413dc483f5 h1:i0p03B68+xC1kD2QUO8JzDTPXCzhN56OLJ+IhHY8U3A= -golang.org/x/telemetry v0.0.0-20260116145544-c6413dc483f5/go.mod h1:b7fPSJ0pKZ3ccUh8gnTONJxhn3c/PS6tyzQvyqw4iA8= golang.org/x/telemetry v0.0.0-20260213145524-e0ab670178e1 h1:QNaHp8YvpPswfDNxlCmJyeesxbGOgaKf41iT9/QrErY= golang.org/x/telemetry v0.0.0-20260213145524-e0ab670178e1/go.mod h1:NuITXsA9cTiqnXtVk+/wrBT2Ja4X5hsfGOYRJ6kgYjs= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= -golang.org/x/term v0.39.0 h1:RclSuaJf32jOqZz74CkPA9qFuVTX7vhLlpfj/IGWlqY= -golang.org/x/term v0.39.0/go.mod h1:yxzUCTP/U+FzoxfdKmLaA0RV1WgE0VY7hXBwKtY/4ww= golang.org/x/term v0.40.0 h1:36e4zGLqU4yhjlmxEaagx2KuYbJq3EwY8K943ZsHcvg= +golang.org/x/term v0.40.0/go.mod h1:w2P8uVp06p2iyKKuvXIm7N/y0UCRt3UfJTfZ7oOpglM= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ= -golang.org/x/text v0.33.0 h1:B3njUFyqtHDUI5jMn1YIr5B0IE2U0qck04r6d4KPAxE= -golang.org/x/text v0.33.0/go.mod h1:LuMebE6+rBincTi9+xWTY8TztLzKHc/9C1uBCG27+q8= golang.org/x/text v0.34.0 h1:oL/Qq0Kdaqxa1KbNeMKwQq0reLCCaFtqu2eNuSeNHbk= golang.org/x/text v0.34.0/go.mod h1:homfLqTYRFyVYemLBFl5GgL/DWEiH5wcsQ5gSh1yziA= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= -golang.org/x/tools v0.41.0 h1:a9b8iMweWG+S0OBnlU36rzLp20z1Rp10w+IY2czHTQc= -golang.org/x/tools v0.41.0/go.mod h1:XSY6eDqxVNiYgezAVqqCeihT4j1U2CCsqvH3WhQpnlg= golang.org/x/tools v0.42.0 h1:uNgphsn75Tdz5Ji2q36v/nsFSfR/9BRFvqhGBaJGd5k= golang.org/x/tools v0.42.0/go.mod h1:Ma6lCIwGZvHK6XtgbswSoWroEkhugApmsXyrUmBhfr0= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= @@ -295,12 +268,8 @@ gonum.org/v1/gonum v0.16.0/go.mod h1:fef3am4MQ93R2HHpKnLk4/Tbh/s0+wqD5nfa6Pnwy4E google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.6.8 h1:IhEN5q69dyKagZPYMSdIjS2HqprW324FRQZJcGqPAsM= google.golang.org/appengine v1.6.8/go.mod h1:1jJ3jBArFh5pcgW8gCtRJnepW8FzD1V44FJffLiz/Ds= -google.golang.org/genproto/googleapis/rpc v0.0.0-20260120221211-b8f7ae30c516 h1:sNrWoksmOyF5bvJUcnmbeAmQi8baNhqg5IWaI3llQqU= -google.golang.org/genproto/googleapis/rpc v0.0.0-20260120221211-b8f7ae30c516/go.mod h1:j9x/tPzZkyxcgEFkiKEEGxfvyumM01BEtsW8xzOahRQ= google.golang.org/genproto/googleapis/rpc v0.0.0-20260209200024-4cfbd4190f57 h1:mWPCjDEyshlQYzBpMNHaEof6UX1PmHcaUODUywQ0uac= google.golang.org/genproto/googleapis/rpc v0.0.0-20260209200024-4cfbd4190f57/go.mod h1:j9x/tPzZkyxcgEFkiKEEGxfvyumM01BEtsW8xzOahRQ= -google.golang.org/grpc v1.78.0 h1:K1XZG/yGDJnzMdd/uZHAkVqJE+xIDOcmdSFZkBUicNc= -google.golang.org/grpc v1.78.0/go.mod h1:I47qjTo4OKbMkjA/aOOwxDIiPSBofUtQUI5EfpWvW7U= google.golang.org/grpc v1.79.1 h1:zGhSi45ODB9/p3VAawt9a+O/MULLl9dpizzNNpq7flY= google.golang.org/grpc v1.79.1/go.mod h1:KmT0Kjez+0dde/v2j9vzwoAScgEPx/Bw1CYChhHLrHQ= google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= diff --git a/internal/testutils/activateMocks.go b/internal/testutils/activateMocks.go new file mode 100644 index 00000000..ddeaff83 --- /dev/null +++ b/internal/testutils/activateMocks.go @@ -0,0 +1,32 @@ +package testutils + +import ( + "fmt" + "net/http" + "path/filepath" + "regexp" + "runtime" + "strings" + + "github.com/jarcoal/httpmock" +) + +func TestName() string { + pc, _, _, _ := runtime.Caller(1) + nameFull := runtime.FuncForPC(pc).Name() + nameEnd := filepath.Ext(nameFull) + name := strings.TrimPrefix(nameEnd, ".") + return name +} + +func ActivateEnvironmentHttpMocks() { + httpmock.RegisterNoResponder(func(req *http.Request) (*http.Response, error) { + return nil, fmt.Errorf("no responder found for %s %s, please check your http mocks", req.Method, req.URL) + }) + + httpmock.RegisterRegexpResponder("GET", regexp.MustCompile(`^https://api\.bap\.microsoft\.com/providers/Microsoft\.BusinessAppPlatform/locations/(europe|unitedstates)/environmentLanguages\?api-version=2023-06-01$`), + func(req *http.Request) (*http.Response, error) { + return httpmock.NewStringResponse(http.StatusOK, httpmock.File("../../services/languages/tests/datasource/Validate_Read/get_languages.json").String()), nil + }) + +} diff --git a/sample/sqlserver/flavor.tf b/sample/sqlserver/flavor.tf index 216c8f1e..c491cc09 100644 --- a/sample/sqlserver/flavor.tf +++ b/sample/sqlserver/flavor.tf @@ -1,5 +1,5 @@ -data "stackitprivatepreview_sqlserverflexalpha_flavor" "sqlserver_flavor" { +data "stackitprivatepreview_sqlserverflexbeta_flavor" "sqlserver_flavor" { project_id = var.project_id region = "eu01" cpu = 4 @@ -9,5 +9,5 @@ data "stackitprivatepreview_sqlserverflexalpha_flavor" "sqlserver_flavor" { } output "sqlserver_flavor" { - value = data.stackitprivatepreview_sqlserverflexalpha_flavor.sqlserver_flavor.flavor_id + value = data.stackitprivatepreview_sqlserverflexbeta_flavor.sqlserver_flavor.flavor_id } diff --git a/sample/sqlserver/sqlserver.tf b/sample/sqlserver/sqlserver.tf index bf6a88d1..d18f499c 100644 --- a/sample/sqlserver/sqlserver.tf +++ b/sample/sqlserver/sqlserver.tf @@ -18,15 +18,15 @@ # value = stackit_kms_key.key.key_id # } -resource "stackitprivatepreview_sqlserverflexalpha_instance" "msh-sna-001" { +resource "stackitprivatepreview_sqlserverflexbeta_instance" "msh-beta-sna-001" { project_id = var.project_id - name = "msh-sna-001" + name = "msh-beta-sna-001" backup_schedule = "0 3 * * *" retention_days = 31 - flavor_id = data.stackitprivatepreview_sqlserverflexalpha_flavor.sqlserver_flavor.flavor_id + flavor_id = data.stackitprivatepreview_sqlserverflexbeta_flavor.sqlserver_flavor.flavor_id storage = { class = "premium-perf2-stackit" - size = 50 + size = 10 } version = 2022 encryption = { @@ -34,9 +34,11 @@ resource "stackitprivatepreview_sqlserverflexalpha_instance" "msh-sna-001" { #keyring_id = stackit_kms_keyring.keyring.keyring_id #key_version = 1 # key with scope public - kek_key_id = "fe039bcf-8d7b-431a-801d-9e81371a6b7b" + # kek_key_id = "fe039bcf-8d7b-431a-801d-9e81371a6b7b" + kek_key_id = "c6878f92-ce55-4b79-8236-ba9d001d7967" # msh-k-001 # key_id = var.key_id - kek_key_ring_id = var.keyring_id + # kek_key_ring_id = var.keyring_id + kek_key_ring_id = "0dea3f5f-9947-4dda-a9d3-18418832cefe" # msh-kr-sna01 kek_key_version = var.key_version service_account = var.sa_email } @@ -46,83 +48,16 @@ resource "stackitprivatepreview_sqlserverflexalpha_instance" "msh-sna-001" { } } -resource "stackitprivatepreview_sqlserverflexalpha_instance" "msh-sna-101" { - project_id = var.project_id - name = "msh-sna-101" - backup_schedule = "0 3 * * *" - retention_days = 31 - flavor_id = data.stackitprivatepreview_sqlserverflexalpha_flavor.sqlserver_flavor.flavor_id - storage = { - class = "premium-perf2-stackit" - size = 50 - } - version = 2022 - encryption = { - #key_id = stackit_kms_key.key.key_id - #keyring_id = stackit_kms_keyring.keyring.keyring_id - #key_version = 1 - # key with scope public - kek_key_id = "fe039bcf-8d7b-431a-801d-9e81371a6b7b" - # key_id = var.key_id - kek_key_ring_id = var.keyring_id - kek_key_version = var.key_version - service_account = var.sa_email - } - network = { - acl = ["0.0.0.0/0", "193.148.160.0/19"] - access_scope = "SNA" - } +resource "stackitprivatepreview_sqlserverflexbeta_user" "betauser" { + project_id = var.project_id + instance_id = stackitprivatepreview_sqlserverflexbeta_instance.msh-beta-sna-001.instance_id + username = "betauser" + roles = ["##STACKIT_DatabaseManager##", "##STACKIT_LoginManager##"] } -resource "stackitprivatepreview_sqlserverflexalpha_instance" "msh-nosna-001" { - project_id = var.project_id - name = "msh-nosna-001" - backup_schedule = "0 3 * * *" - retention_days = 31 - flavor_id = data.stackitprivatepreview_sqlserverflexalpha_flavor.sqlserver_flavor.flavor_id - storage = { - class = "premium-perf2-stackit" - size = 50 - } - version = 2022 - # encryption = { - # #key_id = stackit_kms_key.key.key_id - # #keyring_id = stackit_kms_keyring.keyring.keyring_id - # #key_version = 1 - # #key_id = var.key_id - # # key with scope public - # key_id = "fe039bcf-8d7b-431a-801d-9e81371a6b7b" - # keyring_id = var.keyring_id - # key_version = var.key_version - # service_account = var.sa_email - # } - network = { - acl = ["0.0.0.0/0", "193.148.160.0/19"] - access_scope = "PUBLIC" - } +resource "stackitprivatepreview_sqlserverflexbeta_database" "betadb" { + project_id = var.project_id + instance_id = stackitprivatepreview_sqlserverflexbeta_instance.msh-beta-sna-001.instance_id + name = "mshtest002" + owner = stackitprivatepreview_sqlserverflexbeta_user.betauser.username } - -# data "stackitprivatepreview_sqlserverflexalpha_instance" "test" { -# project_id = var.project_id -# instance_id = var.instance_id -# region = "eu01" -# } - -# output "test" { -# value = data.stackitprivatepreview_sqlserverflexalpha_instance.test -# } - -# resource "stackitprivatepreview_sqlserverflexalpha_user" "ptlsdbadminuser" { -# project_id = var.project_id -# instance_id = stackitprivatepreview_sqlserverflexalpha_instance.sqlsrv.instance_id -# username = var.db_admin_username -# roles = ["##STACKIT_LoginManager##", "##STACKIT_DatabaseManager##"] -# } - -# resource "stackitprivatepreview_sqlserverflexalpha_user" "ptlsdbuser" { -# project_id = var.project_id -# instance_id = stackitprivatepreview_sqlserverflexalpha_instance.sqlsrv.instance_id -# username = var.db_username -# roles = ["##STACKIT_LoginManager##"] -# } - diff --git a/stackit/internal/services/postgresflexalpha/database/resource.go b/stackit/internal/services/postgresflexalpha/database/resource.go index 4e3dc936..2b12c281 100644 --- a/stackit/internal/services/postgresflexalpha/database/resource.go +++ b/stackit/internal/services/postgresflexalpha/database/resource.go @@ -356,24 +356,12 @@ func (r *databaseResource) Update( return } - // Read identity data - var identityData DatabaseResourceIdentityModel - resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...) - if resp.Diagnostics.HasError() { - return - } - ctx = core.InitProviderContext(ctx) - projectId, region, instanceId, databaseId64, errExt := r.extractIdentityData(model, identityData) - if errExt != nil { - core.LogAndAddError( - ctx, - &resp.Diagnostics, - extractErrorSummary, - fmt.Sprintf(extractErrorMessage, errExt), - ) - } + projectId := model.ProjectId.ValueString() + instanceId := model.InstanceId.ValueString() + region := model.Region.ValueString() + databaseId64 := model.DatabaseId.ValueInt64() if databaseId64 > math.MaxInt32 { core.LogAndAddError(ctx, &resp.Diagnostics, "Error in type conversion", "int value too large (databaseId)") diff --git a/stackit/internal/services/sqlserverflexalpha/database/resource.go b/stackit/internal/services/sqlserverflexalpha/database/resource.go index eec284f1..72e8105f 100644 --- a/stackit/internal/services/sqlserverflexalpha/database/resource.go +++ b/stackit/internal/services/sqlserverflexalpha/database/resource.go @@ -352,13 +352,6 @@ func (r *databaseResource) Read(ctx context.Context, req resource.ReadRequest, r return } - // Read identity data - var identityData DatabaseResourceIdentityModel - resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...) - if resp.Diagnostics.HasError() { - return - } - ctx = core.InitProviderContext(ctx) projectId := model.ProjectId.ValueString() diff --git a/stackit/internal/services/sqlserverflexbeta/database/resource.go b/stackit/internal/services/sqlserverflexbeta/database/resource.go index 95424987..99713dcf 100644 --- a/stackit/internal/services/sqlserverflexbeta/database/resource.go +++ b/stackit/internal/services/sqlserverflexbeta/database/resource.go @@ -340,13 +340,6 @@ func (r *databaseResource) Read(ctx context.Context, req resource.ReadRequest, r return } - // Read identity data - var identityData DatabaseResourceIdentityModel - resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...) - if resp.Diagnostics.HasError() { - return - } - ctx = core.InitProviderContext(ctx) projectId := model.ProjectId.ValueString() @@ -419,24 +412,10 @@ func (r *databaseResource) Delete(ctx context.Context, req resource.DeleteReques return } - // Read identity data - var identityData DatabaseResourceIdentityModel - resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...) - if resp.Diagnostics.HasError() { - return - } - - ctx = core.InitProviderContext(ctx) - - projectId, region, instanceId, databaseName, errExt := r.extractIdentityData(model, identityData) - if errExt != nil { - core.LogAndAddError( - ctx, - &resp.Diagnostics, - extractErrorSummary, - fmt.Sprintf(extractErrorMessage, errExt), - ) - } + projectId := model.ProjectId.ValueString() + region := model.Region.ValueString() + instanceId := model.InstanceId.ValueString() + databaseName := model.DatabaseName.ValueString() ctx = tflog.SetField(ctx, "project_id", projectId) ctx = tflog.SetField(ctx, "instance_id", instanceId) @@ -455,6 +434,8 @@ func (r *databaseResource) Delete(ctx context.Context, req resource.DeleteReques return } + // TODO: wait handler?? + ctx = core.LogResponse(ctx) resp.State.RemoveResource(ctx) diff --git a/stackit/provider_acc_test.go b/stackit/provider_acc_test.go index 854f00a7..11cdf672 100644 --- a/stackit/provider_acc_test.go +++ b/stackit/provider_acc_test.go @@ -1,15 +1,41 @@ package stackit_test import ( + "context" _ "embed" "fmt" - "log/slog" + "net/http" "os" + "reflect" "regexp" "testing" + "time" - "github.com/hashicorp/terraform-plugin-testing/helper/acctest" - "github.com/joho/godotenv" + "github.com/golang-jwt/jwt/v5" + test "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/jarcoal/httpmock" + "github.com/stackitcloud/stackit-sdk-go/core/clients" + "github.com/stackitcloud/stackit-sdk-go/core/utils" + "github.com/stretchr/testify/require" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha" + postgresFlexAlphaFlavor "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/flavor" + + "github.com/hashicorp/terraform-plugin-framework/datasource" + tfResource "github.com/hashicorp/terraform-plugin-framework/resource" + + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit" + postgresFlexAlphaDatabase "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/database" + postgresflexalphaFlavors "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/flavors" + postgresFlexAlphaInstance "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/instance" + postgresFlexAlphaUser "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/user" + sqlserverflexalphaDatabase "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/database" + sqlserverFlexAlphaFlavor "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/flavor" + sqlserverFlexAlphaInstance "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/instance" + sqlserverFlexAlphaUser "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/user" + sqlserverflexBetaDatabase "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/database" + sqlserverFlexBetaFlavor "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/flavor" + sqlserverFlexBetaInstance "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/instance" + sqlserverFlexBetaUser "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/user" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/internal/testutils" @@ -28,34 +54,155 @@ var providerValidAttributes string var testConfigProviderCredentials config.Variables -func setup() { - err := godotenv.Load() - if err != nil { - slog.Info("could not find .env file - not loading .env") - return - } - slog.Info("loaded .env file") - - testConfigProviderCredentials = config.Variables{ - "project_id": config.StringVariable(os.Getenv("TF_ACC_PROJECT_ID")), - "region": config.StringVariable(os.Getenv("TF_ACC_REGION")), - "service_account_key_path": config.StringVariable(os.Getenv("TF_ACC_SERVICE_ACCOUNT_FILE")), - "name": config.StringVariable( - fmt.Sprintf( - "tf-acc-prov%s", - acctest.RandStringFromCharSet(3, acctest.CharSetAlphaNum), - ), - ), - } -} - func TestMain(m *testing.M) { - setup() + testutils.Setup() code := m.Run() // shutdown() os.Exit(code) } +func TestMshTest(t *testing.T) { + httpmock.Activate() + defer httpmock.DeactivateAndReset() + + testutils.ActivateEnvironmentHttpMocks() + + httpmock.RegisterResponder("POST", `https://service-account.api.stackit.cloud/token`, + func(req *http.Request) (*http.Response, error) { + token := jwt.NewWithClaims(jwt.SigningMethodHS256, jwt.MapClaims{ + "foo": "bar", + "nbf": time.Date(2015, 10, 10, 12, 0, 0, 0, time.UTC).Unix(), + }) + // Sign and get the complete encoded token as a string using the secret + tokenString, err := token.SignedString([]byte("mySecret")) + if err != nil { + panic(err) + } + + tR := clients.TokenResponseBody{ + AccessToken: tokenString, + ExpiresIn: 3600, + RefreshToken: "", + Scope: "", + TokenType: "", + } + + return httpmock.NewJsonResponse(http.StatusOK, tR) + }) + + httpmock.RegisterResponder("GET", `https://postgres-flex-service.api.eu01.stackit.cloud/v3alpha1/projects/xyz-project-id/regions/eu01/flavors?page=1&size=25&sort=id.asc`, + func(req *http.Request) (*http.Response, error) { + res := postgresflexalpha.GetFlavorsResponse{ + Flavors: &[]postgresflexalpha.ListFlavors{ + { + Cpu: nil, + Description: nil, + Id: nil, + MaxGB: nil, + Memory: nil, + MinGB: nil, + NodeType: nil, + StorageClasses: nil, + }, + }, + Pagination: &postgresflexalpha.Pagination{ + Page: utils.Ptr(int64(1)), + Size: utils.Ptr(int64(25)), + Sort: nil, + TotalPages: utils.Ptr(int64(1)), + TotalRows: utils.Ptr(int64(0)), + }, + } + return httpmock.NewJsonResponse( + http.StatusOK, + res, + ) + }, + ) + + test.Test(t, test.TestCase{ + IsUnitTest: true, + ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories, + Steps: []test.TestStep{ + { + ConfigVariables: map[string]config.Variable{ + "project_id": config.StringVariable("xyz-project-id"), + }, + Config: fmt.Sprintf(` + provider "stackitprivatepreview" { + default_region = "%[1]s" + service_account_key_path = "%[2]s" + } + variable "project_id" { + type = string + } + data "stackitprivatepreview_postgresflexalpha_flavor" "all" { + project_id = var.project_id + region = "eu01" + cpu = 2 + ram = 4 + node_type = "Single" + storage_class = "premium-perf2-stackit" + }`, + os.Getenv("TF_ACC_REGION"), + os.Getenv("TF_ACC_SERVICE_ACCOUNT_FILE"), + ), + }, + }, + }) +} + +func TestUnitProviderHasChildDataSources_Basic(t *testing.T) { + expectedDataSources := []datasource.DataSource{ + postgresFlexAlphaFlavor.NewFlavorDataSource(), + // postgresFlexAlphaFlavor.NewFlavorListDataSource, + postgresFlexAlphaDatabase.NewDatabaseDataSource(), + postgresFlexAlphaInstance.NewInstanceDataSource(), + postgresFlexAlphaUser.NewUserDataSource(), + postgresflexalphaFlavors.NewFlavorsDataSource(), + + sqlserverFlexAlphaFlavor.NewFlavorDataSource(), + sqlserverFlexAlphaInstance.NewInstanceDataSource(), + sqlserverFlexAlphaUser.NewUserDataSource(), + sqlserverflexalphaDatabase.NewDatabaseDataSource(), + + sqlserverflexBetaDatabase.NewDatabaseDataSource(), + sqlserverFlexBetaInstance.NewInstanceDataSource(), + sqlserverFlexBetaUser.NewUserDataSource(), + sqlserverFlexBetaFlavor.NewFlavorDataSource(), + } + datasources := stackit.New("testing")().(*stackit.Provider).DataSources(context.Background()) + + if !reflect.DeepEqual(len(expectedDataSources), len(datasources)) { + for _, d := range datasources { + require.Containsf(t, expectedDataSources, d(), "Data source %+v was not expected", reflect.TypeOf(d())) + } + } +} + +func TestUnitProviderHasChildResources_Basic(t *testing.T) { + expectedResources := []tfResource.Resource{ + postgresFlexAlphaInstance.NewInstanceResource(), + postgresFlexAlphaUser.NewUserResource(), + postgresFlexAlphaDatabase.NewDatabaseResource(), + + sqlserverFlexAlphaInstance.NewInstanceResource(), + sqlserverFlexAlphaUser.NewUserResource(), + sqlserverflexalphaDatabase.NewDatabaseResource(), + + sqlserverFlexBetaInstance.NewInstanceResource(), + sqlserverFlexBetaUser.NewUserResource(), + sqlserverflexBetaDatabase.NewDatabaseResource(), + } + resources := stackit.New("testing")().(*stackit.Provider).Resources(context.Background()) + + if !reflect.DeepEqual(len(expectedResources), len(resources)) { + for _, d := range resources { + require.Containsf(t, expectedResources, d(), "Resource %+v was not expected", reflect.TypeOf(d())) + } + } +} + func TestAccEnvVarServiceAccountPathValid(t *testing.T) { t.Skip("needs refactoring") // Check if acceptance tests should be run From 4a2819787d453bb7ce98083b57bb4727e334b191 Mon Sep 17 00:00:00 2001 From: Andre Harms Date: Thu, 19 Feb 2026 08:54:34 +0000 Subject: [PATCH 38/41] fix: linting (#77) ## Description relates to #1234 ## Checklist - [ ] Issue was linked above - [ ] Code format was applied: `make fmt` - [ ] Examples were added / adjusted (see `examples/` directory) - [x] Docs are up-to-date: `make generate-docs` (will be checked by CI) - [ ] Unit tests got implemented or updated - [ ] Acceptance tests got implemented or updated (see e.g. [here](https://github.com/stackitcloud/terraform-provider-stackit/blob/f5f99d170996b208672ae684b6da53420e369563/stackit/internal/services/dns/dns_acc_test.go)) - [x] Unit tests are passing: `make test` (will be checked by CI) - [x] No linter issues: `make lint` (will be checked by CI) Reviewed-on: https://tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pulls/77 Co-authored-by: Andre Harms Co-committed-by: Andre Harms --- Makefile | 6 +- cmd/cmd/build/build.go | 335 ++++-- cmd/cmd/build/copy.go | 15 +- cmd/cmd/buildCmd.go | 4 +- cmd/cmd/examplesCmd.go | 51 +- cmd/cmd/getFieldsCmd.go | 59 +- cmd/cmd/publish/architecture.go | 44 +- cmd/cmd/publish/provider.go | 36 +- cmd/cmd/publish/versions.go | 33 +- cmd/cmd/publishCmd.go | 21 +- .../postgresflexalpha_database.md | 38 - docs/data-sources/postgresflexalpha_flavor.md | 54 - .../data-sources/postgresflexalpha_flavors.md | 68 -- .../postgresflexalpha_instance.md | 87 -- docs/data-sources/postgresflexalpha_user.md | 42 - .../sqlserverflexalpha_database.md | 32 - .../data-sources/sqlserverflexalpha_flavor.md | 54 - .../sqlserverflexalpha_instance.md | 77 -- docs/data-sources/sqlserverflexalpha_user.md | 62 - .../sqlserverflexbeta_database.md | 40 - docs/data-sources/sqlserverflexbeta_flavor.md | 54 - .../sqlserverflexbeta_instance.md | 77 -- docs/data-sources/sqlserverflexbeta_user.md | 54 - docs/index.md | 83 -- docs/resources/postgresflexalpha_database.md | 57 - docs/resources/postgresflexalpha_instance.md | 131 --- docs/resources/postgresflexalpha_user.md | 59 - docs/resources/sqlserverflexalpha_database.md | 63 -- docs/resources/sqlserverflexalpha_instance.md | 103 -- docs/resources/sqlserverflexalpha_user.md | 53 - docs/resources/sqlserverflexbeta_database.md | 51 - docs/resources/sqlserverflexbeta_instance.md | 158 --- docs/resources/sqlserverflexbeta_user.md | 53 - .../resource.tf | 2 +- .../resource.tf | 2 +- go.mod | 233 +++- go.sum | 1005 ++++++++++++++++- golang-ci.yaml | 29 +- internal/testutils/activateMocks.go | 21 +- scripts/lint-golangci-lint.sh | 19 - scripts/project.sh | 6 +- scripts/tfplugindocs.sh | 2 +- stackit/internal/core/core.go | 34 +- stackit/internal/core/retry_round_tripper.go | 6 +- .../internal/core/retry_round_tripper_test.go | 14 +- .../instance_data_source_gen.go | 501 ++++++-- .../postgresflexalpha/instance/functions.go | 40 +- .../postgresflexalpha/instance/resource.go | 16 - .../resources_gen/instance_resource_gen.go | 501 ++++++-- .../postgresflex_acc_test.go | 271 +++-- .../postgresflexalpha/user/resource.go | 1 - .../sqlserverflexalpha/database/datasource.go | 1 - .../sqlserverflexalpha/database/resource.go | 29 +- .../sqlserverflexalpha/instance/functions.go | 1 - .../sqlserverflexalpha/instance/resource.go | 5 +- .../sqlserverflex_acc_test.go | 8 +- .../sqlserverflexalpha/user/datasource.go | 2 - .../sqlserverflexalpha/user/resource.go | 6 +- .../sqlserverflexbeta/database/datasource.go | 1 - .../sqlserverflexbeta/database/resource.go | 52 +- .../sqlserverflexbeta/instance/functions.go | 1 - .../instance/functions_test.go | 106 +- .../sqlserverflexbeta/instance/resource.go | 5 +- .../sqlserverflex_acc_test.go | 8 +- .../sqlserverflexbeta/user/datasource.go | 2 - .../sqlserverflexbeta/user/resource.go | 35 +- .../internal/wait/postgresflexalpha/wait.go | 30 +- .../internal/wait/sqlserverflexalpha/wait.go | 17 +- .../wait/sqlserverflexalpha/wait_test.go | 3 +- .../internal/wait/sqlserverflexbeta/wait.go | 29 +- .../wait/sqlserverflexbeta/wait_test.go | 3 +- stackit/provider.go | 5 +- stackit/provider_acc_test.go | 226 ++-- tools/tools.go | 10 + 74 files changed, 3010 insertions(+), 2432 deletions(-) delete mode 100644 docs/data-sources/postgresflexalpha_database.md delete mode 100644 docs/data-sources/postgresflexalpha_flavor.md delete mode 100644 docs/data-sources/postgresflexalpha_flavors.md delete mode 100644 docs/data-sources/postgresflexalpha_instance.md delete mode 100644 docs/data-sources/postgresflexalpha_user.md delete mode 100644 docs/data-sources/sqlserverflexalpha_database.md delete mode 100644 docs/data-sources/sqlserverflexalpha_flavor.md delete mode 100644 docs/data-sources/sqlserverflexalpha_instance.md delete mode 100644 docs/data-sources/sqlserverflexalpha_user.md delete mode 100644 docs/data-sources/sqlserverflexbeta_database.md delete mode 100644 docs/data-sources/sqlserverflexbeta_flavor.md delete mode 100644 docs/data-sources/sqlserverflexbeta_instance.md delete mode 100644 docs/data-sources/sqlserverflexbeta_user.md delete mode 100644 docs/index.md delete mode 100644 docs/resources/postgresflexalpha_database.md delete mode 100644 docs/resources/postgresflexalpha_instance.md delete mode 100644 docs/resources/postgresflexalpha_user.md delete mode 100644 docs/resources/sqlserverflexalpha_database.md delete mode 100644 docs/resources/sqlserverflexalpha_instance.md delete mode 100644 docs/resources/sqlserverflexalpha_user.md delete mode 100644 docs/resources/sqlserverflexbeta_database.md delete mode 100644 docs/resources/sqlserverflexbeta_instance.md delete mode 100644 docs/resources/sqlserverflexbeta_user.md delete mode 100755 scripts/lint-golangci-lint.sh diff --git a/Makefile b/Makefile index 86a2a0f2..680cf020 100644 --- a/Makefile +++ b/Makefile @@ -12,9 +12,10 @@ project-tools: # LINT lint-golangci-lint: @echo "Linting with golangci-lint" - @$(SCRIPTS_BASE)/lint-golangci-lint.sh + @go run github.com/golangci/golangci-lint/v2/cmd/golangci-lint run --fix --config golang-ci.yaml -lint-tf: + +lint-tf: @echo "Linting terraform files" @terraform fmt -check -diff -recursive @@ -23,6 +24,7 @@ lint: lint-golangci-lint lint-tf # DOCUMENTATION GENERATION generate-docs: @echo "Generating documentation with tfplugindocs" + @$(SCRIPTS_BASE)/tfplugindocs.sh build: diff --git a/cmd/cmd/build/build.go b/cmd/cmd/build/build.go index d381de63..7b11f214 100644 --- a/cmd/cmd/build/build.go +++ b/cmd/cmd/build/build.go @@ -60,7 +60,7 @@ func (b *Builder) Build() error { if !b.PackagesOnly { slog.Info(" ... Checking needed commands available") - err := checkCommands([]string{"tfplugingen-framework", "tfplugingen-openapi"}) + err := checkCommands([]string{}) if err != nil { return err } @@ -111,7 +111,7 @@ func (b *Builder) Build() error { } slog.Info("Creating OAS dir") - err = os.MkdirAll(path.Join(genDir, "oas"), 0755) + err = os.MkdirAll(path.Join(genDir, "oas"), 0o755) //nolint:gosec // this dir is not sensitive, so we can use 0755 if err != nil { return err } @@ -158,7 +158,17 @@ func (b *Builder) Build() error { if err = cmd.Wait(); err != nil { var exitErr *exec.ExitError if errors.As(err, &exitErr) { - slog.Error("cmd.Wait", "code", exitErr.ExitCode(), "error", err, "stdout", stdOut.String(), "stderr", stdErr.String()) + slog.Error( + "cmd.Wait", + "code", + exitErr.ExitCode(), + "error", + err, + "stdout", + stdOut.String(), + "stderr", + stdErr.String(), + ) return fmt.Errorf("%s", stdErr.String()) } if err != nil { @@ -192,7 +202,11 @@ func (b *Builder) Build() error { } slog.Info("Rearranging package directories") - err = os.MkdirAll(path.Join(*root, "pkg_gen"), 0755) // noqa:gosec + //nolint:gosec // this dir is not sensitive, so we can use 0755 + err = os.MkdirAll( + path.Join(*root, "pkg_gen"), + 0o755, + ) if err != nil { return err } @@ -202,20 +216,21 @@ func (b *Builder) Build() error { return err } for _, item := range items { - if item.IsDir() { - slog.Info(" -> package", "name", item.Name()) - tgtDir := path.Join(*root, "pkg_gen", item.Name()) - if fileExists(tgtDir) { - delErr := os.RemoveAll(tgtDir) - if delErr != nil { - return delErr - } - } - err = os.Rename(path.Join(srcDir, item.Name()), tgtDir) - if err != nil { - return err + if !item.IsDir() { + continue + } + slog.Info(" -> package", "name", item.Name()) + tgtDir := path.Join(*root, "pkg_gen", item.Name()) + if fileExists(tgtDir) { + delErr := os.RemoveAll(tgtDir) + if delErr != nil { + return delErr } } + err = os.Rename(path.Join(srcDir, item.Name()), tgtDir) + if err != nil { + return err + } } if !b.PackagesOnly { @@ -275,8 +290,8 @@ type templateData struct { Fields []string } -func fileExists(path string) bool { - _, err := os.Stat(path) +func fileExists(pathValue string) bool { + _, err := os.Stat(pathValue) if os.IsNotExist(err) { return false } @@ -312,10 +327,22 @@ func createBoilerplate(rootFolder, folder string) error { resourceName := res.Name() - dsFile := path.Join(folder, svc.Name(), res.Name(), "datasources_gen", fmt.Sprintf("%s_data_source_gen.go", res.Name())) + dsFile := path.Join( + folder, + svc.Name(), + res.Name(), + "datasources_gen", + fmt.Sprintf("%s_data_source_gen.go", res.Name()), + ) handleDS = fileExists(dsFile) - resFile := path.Join(folder, svc.Name(), res.Name(), "resources_gen", fmt.Sprintf("%s_resource_gen.go", res.Name())) + resFile := path.Join( + folder, + svc.Name(), + res.Name(), + "resources_gen", + fmt.Sprintf("%s_resource_gen.go", res.Name()), + ) handleRes = fileExists(resFile) dsGoFile := path.Join(folder, svc.Name(), res.Name(), "datasource.go") @@ -407,7 +434,6 @@ func createBoilerplate(rootFolder, folder string) error { if err != nil { return err } - } } } @@ -416,7 +442,7 @@ func createBoilerplate(rootFolder, folder string) error { } func ucfirst(s string) string { - if len(s) == 0 { + if s == "" { return "" } return strings.ToUpper(s[:1]) + s[1:] @@ -451,8 +477,8 @@ func writeTemplateToFile(tplName, tplFile, outFile string, data *templateData) e } func generateServiceFiles(rootDir, generatorDir string) error { - // slog.Info("Generating specs folder") - err := os.MkdirAll(path.Join(rootDir, "generated", "specs"), 0755) + //nolint:gosec // this file is not sensitive, so we can use 0755 + err := os.MkdirAll(path.Join(rootDir, "generated", "specs"), 0o755) if err != nil { return err } @@ -490,7 +516,6 @@ func generateServiceFiles(rootDir, generatorDir string) error { continue } - // slog.Info("Checking spec", "name", spec.Name()) r := regexp.MustCompile(`^(.*)_config.yml$`) matches := r.FindAllStringSubmatch(specFile.Name(), -1) if matches != nil { @@ -506,27 +531,44 @@ func generateServiceFiles(rootDir, generatorDir string) error { resource, ) - oasFile := path.Join(generatorDir, "oas", fmt.Sprintf("%s%s.json", service.Name(), svcVersion.Name())) + oasFile := path.Join( + generatorDir, + "oas", + fmt.Sprintf("%s%s.json", service.Name(), svcVersion.Name()), + ) if _, oasErr := os.Stat(oasFile); os.IsNotExist(oasErr) { - slog.Warn(" could not find matching oas", "svc", service.Name(), "version", svcVersion.Name()) + slog.Warn( + " could not find matching oas", + "svc", + service.Name(), + "version", + svcVersion.Name(), + ) continue } scName := fmt.Sprintf("%s%s", service.Name(), svcVersion.Name()) scName = strings.ReplaceAll(scName, "-", "") - err = os.MkdirAll(path.Join(rootDir, "generated", "internal", "services", scName, resource), 0755) + //nolint:gosec // this file is not sensitive, so we can use 0755 + err = os.MkdirAll(path.Join(rootDir, "generated", "internal", "services", scName, resource), 0o755) if err != nil { return err } - // slog.Info("Generating openapi spec json") - specJsonFile := path.Join(rootDir, "generated", "specs", fmt.Sprintf("%s_%s_spec.json", scName, resource)) + specJsonFile := path.Join( + rootDir, + "generated", + "specs", + fmt.Sprintf("%s_%s_spec.json", scName, resource), + ) var stdOut, stdErr bytes.Buffer - // noqa:gosec + // nolint:gosec // #nosec this command is not using any untrusted input, so we can ignore gosec warning cmd := exec.Command( - "tfplugingen-openapi", + "go", + "run", + "github.com/hashicorp/terraform-plugin-codegen-openapi/cmd/tfplugingen-openapi", "generate", "--config", path.Join(rootDir, "service_specs", service.Name(), svcVersion.Name(), fileName), @@ -553,11 +595,29 @@ func generateServiceFiles(rootDir, generatorDir string) error { if err = cmd.Wait(); err != nil { var exitErr *exec.ExitError if errors.As(err, &exitErr) { - slog.Error("tfplugingen-openapi generate", "code", exitErr.ExitCode(), "error", err, "stdout", stdOut.String(), "stderr", stdErr.String()) + slog.Error( + "tfplugingen-openapi generate", + "code", + exitErr.ExitCode(), + "error", + err, + "stdout", + stdOut.String(), + "stderr", + stdErr.String(), + ) return fmt.Errorf("%s", stdErr.String()) } if err != nil { - slog.Error("tfplugingen-openapi generate", "err", err, "stdout", stdOut.String(), "stderr", stdErr.String()) + slog.Error( + "tfplugingen-openapi generate", + "err", + err, + "stdout", + stdOut.String(), + "stderr", + stdErr.String(), + ) return err } } @@ -565,18 +625,26 @@ func generateServiceFiles(rootDir, generatorDir string) error { slog.Warn(" command output", "stdout", stdOut.String(), "stderr", stdErr.String()) } - // slog.Info("Creating terraform svc resource files folder") - tgtFolder := path.Join(rootDir, "generated", "internal", "services", scName, resource, "resources_gen") - err = os.MkdirAll(tgtFolder, 0755) + tgtFolder := path.Join( + rootDir, + "generated", + "internal", + "services", + scName, + resource, + "resources_gen", + ) + //nolint:gosec // this file is not sensitive, so we can use 0755 + err = os.MkdirAll(tgtFolder, 0o755) if err != nil { return err } - // slog.Info("Generating terraform svc resource files") - - // noqa:gosec + // nolint:gosec // #nosec this command is not using any untrusted input, so we can ignore gosec warning cmd2 := exec.Command( - "tfplugingen-framework", + "go", + "run", + "github.com/hashicorp/terraform-plugin-codegen-framework/cmd/tfplugingen-framework", "generate", "resources", "--input", @@ -597,27 +665,53 @@ func generateServiceFiles(rootDir, generatorDir string) error { if err = cmd2.Wait(); err != nil { var exitErr *exec.ExitError if errors.As(err, &exitErr) { - slog.Error("tfplugingen-framework generate resources", "code", exitErr.ExitCode(), "error", err, "stdout", stdOut.String(), "stderr", stdErr.String()) + slog.Error( + "tfplugingen-framework generate resources", + "code", + exitErr.ExitCode(), + "error", + err, + "stdout", + stdOut.String(), + "stderr", + stdErr.String(), + ) return fmt.Errorf("%s", stdErr.String()) } if err != nil { - slog.Error("tfplugingen-framework generate resources", "err", err, "stdout", stdOut.String(), "stderr", stdErr.String()) + slog.Error( + "tfplugingen-framework generate resources", + "err", + err, + "stdout", + stdOut.String(), + "stderr", + stdErr.String(), + ) return err } } - // slog.Info("Creating terraform svc datasource files folder") - tgtFolder = path.Join(rootDir, "generated", "internal", "services", scName, resource, "datasources_gen") - err = os.MkdirAll(tgtFolder, 0755) + tgtFolder = path.Join( + rootDir, + "generated", + "internal", + "services", + scName, + resource, + "datasources_gen", + ) + //nolint:gosec // this directory is not sensitive, so we can use 0755 + err = os.MkdirAll(tgtFolder, 0o755) if err != nil { return err } - // slog.Info("Generating terraform svc resource files") - - // noqa:gosec + // nolint:gosec // #nosec this command is not using any untrusted input, so we can ignore gosec warning cmd3 := exec.Command( - "tfplugingen-framework", + "go", + "run", + "github.com/hashicorp/terraform-plugin-codegen-framework/cmd/tfplugingen-framework", "generate", "data-sources", "--input", @@ -639,11 +733,29 @@ func generateServiceFiles(rootDir, generatorDir string) error { if err = cmd3.Wait(); err != nil { var exitErr *exec.ExitError if errors.As(err, &exitErr) { - slog.Error("tfplugingen-framework generate data-sources", "code", exitErr.ExitCode(), "error", err, "stdout", stdOut.String(), "stderr", stdErr.String()) + slog.Error( + "tfplugingen-framework generate data-sources", + "code", + exitErr.ExitCode(), + "error", + err, + "stdout", + stdOut.String(), + "stderr", + stdErr.String(), + ) return fmt.Errorf("%s", stdErr.String()) } if err != nil { - slog.Error("tfplugingen-framework generate data-sources", "err", err, "stdout", stdOut.String(), "stderr", stdErr.String()) + slog.Error( + "tfplugingen-framework generate data-sources", + "err", + err, + "stdout", + stdOut.String(), + "stderr", + stdErr.String(), + ) return err } } @@ -674,10 +786,10 @@ func handleTfTagForDatasourceFile(filePath, service, resource string) error { if err != nil { return err } - defer f.Close() root, err := getRoot() if err != nil { + //nolint:gocritic // in this case, we want to log the error and exit, as we cannot proceed without the root directory log.Fatal(err) } @@ -685,7 +797,6 @@ func handleTfTagForDatasourceFile(filePath, service, resource string) error { if err != nil { return err } - defer tmp.Close() sc := bufio.NewScanner(f) for sc.Scan() { @@ -709,6 +820,7 @@ func handleTfTagForDatasourceFile(filePath, service, resource string) error { return err } + //nolint:gosec // path traversal is not a concern here if err := os.Rename(tmp.Name(), filePath); err != nil { log.Fatal(err) } @@ -773,13 +885,23 @@ func copyFile(src, dst string) (int64, error) { if err != nil { return 0, err } - defer source.Close() + defer func(source *os.File) { + err := source.Close() + if err != nil { + slog.Error("copyFile", "err", err) + } + }(source) destination, err := os.Create(dst) if err != nil { return 0, err } - defer destination.Close() + defer func(destination *os.File) { + err := destination.Close() + if err != nil { + slog.Error("copyFile", "err", err) + } + }(destination) nBytes, err := io.Copy(destination, source) return nBytes, err } @@ -790,10 +912,8 @@ func getOnlyLatest(m map[string]version) (map[string]version, error) { item, ok := tmpMap[k] if !ok { tmpMap[k] = v - } else { - if item.major == v.major && item.minor < v.minor { - tmpMap[k] = v - } + } else if item.major == v.major && item.minor < v.minor { + tmpMap[k] = v } } return tmpMap, nil @@ -807,18 +927,19 @@ func getVersions(dir string) (map[string]version, error) { } for _, entry := range children { - if entry.IsDir() { - versions, err := os.ReadDir(path.Join(dir, "services", entry.Name())) - if err != nil { - return nil, err - } - m, err2 := extractVersions(entry.Name(), versions) - if err2 != nil { - return m, err2 - } - for k, v := range m { - res[k] = v - } + if !entry.IsDir() { + continue + } + versions, err := os.ReadDir(path.Join(dir, "services", entry.Name())) + if err != nil { + return nil, err + } + m, err2 := extractVersions(entry.Name(), versions) + if err2 != nil { + return m, err2 + } + for k, v := range m { + res[k] = v } } return res, nil @@ -827,20 +948,21 @@ func getVersions(dir string) (map[string]version, error) { func extractVersions(service string, versionDirs []os.DirEntry) (map[string]version, error) { res := make(map[string]version) for _, vDir := range versionDirs { - if vDir.IsDir() { - r := regexp.MustCompile(`v([0-9]+)([a-z]+)([0-9]*)`) - matches := r.FindAllStringSubmatch(vDir.Name(), -1) - if matches == nil { - continue - } - svc, ver, err := handleVersion(service, matches[0]) - if err != nil { - return nil, err - } + if !vDir.IsDir() { + continue + } + r := regexp.MustCompile(`v(\d+)([a-z]+)(\d*)`) + matches := r.FindAllStringSubmatch(vDir.Name(), -1) + if matches == nil { + continue + } + svc, ver, err := handleVersion(service, matches[0]) + if err != nil { + return nil, err + } - if svc != nil && ver != nil { - res[*svc] = *ver - } + if svc != nil && ver != nil { + res[*svc] = *ver } } return res, nil @@ -927,30 +1049,25 @@ func getTokens(fileName string) ([]string, error) { return nil, err } - ast.Inspect(node, func(n ast.Node) bool { - // Suche nach Typ-Deklarationen (structs) - ts, ok := n.(*ast.TypeSpec) - if ok { - if strings.Contains(ts.Name.Name, "Model") { - // fmt.Printf("found model: %s\n", ts.Name.Name) - ast.Inspect(ts, func(sn ast.Node) bool { - tts, tok := sn.(*ast.Field) - if tok { - // fmt.Printf(" found: %+v\n", tts.Names[0]) - // spew.Dump(tts.Type) - - result = append(result, tts.Names[0].String()) - - // fld, fldOk := tts.Type.(*ast.Ident) - //if fldOk { - // fmt.Printf("type: %+v\n", fld) - //} - } - return true - }) + ast.Inspect( + node, func(n ast.Node) bool { + // Suche nach Typ-Deklarationen (structs) + ts, ok := n.(*ast.TypeSpec) + if ok { + if strings.Contains(ts.Name.Name, "Model") { + ast.Inspect( + ts, func(sn ast.Node) bool { + tts, tok := sn.(*ast.Field) + if tok { + result = append(result, tts.Names[0].String()) + } + return true + }, + ) + } } - } - return true - }) + return true + }, + ) return result, nil } diff --git a/cmd/cmd/build/copy.go b/cmd/cmd/build/copy.go index ec0affe9..e1243c05 100644 --- a/cmd/cmd/build/copy.go +++ b/cmd/cmd/build/copy.go @@ -3,6 +3,7 @@ package build import ( "fmt" "io" + "log/slog" "os" "path/filepath" "syscall" @@ -74,14 +75,24 @@ func Copy(srcFile, dstFile string) error { return err } - defer out.Close() + defer func(out *os.File) { + err := out.Close() + if err != nil { + slog.Error("failed to close file", slog.Any("err", err)) + } + }(out) in, err := os.Open(srcFile) if err != nil { return err } - defer in.Close() + defer func(in *os.File) { + err := in.Close() + if err != nil { + slog.Error("error closing destination file", slog.Any("err", err)) + } + }(in) _, err = io.Copy(out, in) if err != nil { diff --git a/cmd/cmd/buildCmd.go b/cmd/cmd/buildCmd.go index b9f4c835..e3e8e7ae 100644 --- a/cmd/cmd/buildCmd.go +++ b/cmd/cmd/buildCmd.go @@ -16,7 +16,7 @@ var buildCmd = &cobra.Command{ Use: "build", Short: "Build the necessary boilerplate", Long: `...`, - RunE: func(cmd *cobra.Command, args []string) error { + RunE: func(_ *cobra.Command, _ []string) error { b := build.Builder{ SkipClone: skipClone, SkipCleanup: skipCleanup, @@ -30,7 +30,7 @@ func NewBuildCmd() *cobra.Command { return buildCmd } -func init() { // nolint: gochecknoinits +func init() { //nolint:gochecknoinits // This is the standard way to set up Cobra commands buildCmd.Flags().BoolVarP(&skipCleanup, "skip-clean", "c", false, "Skip cleanup steps") buildCmd.Flags().BoolVarP(&skipClone, "skip-clone", "g", false, "Skip cloning from git") buildCmd.Flags().BoolVarP(&packagesOnly, "packages-only", "p", false, "Only generate packages") diff --git a/cmd/cmd/examplesCmd.go b/cmd/cmd/examplesCmd.go index 6c95a799..a4c75962 100644 --- a/cmd/cmd/examplesCmd.go +++ b/cmd/cmd/examplesCmd.go @@ -12,16 +12,15 @@ var examplesCmd = &cobra.Command{ Use: "examples", Short: "create examples", Long: `...`, - RunE: func(cmd *cobra.Command, args []string) error { - - //filePathStr := "stackit/internal/services/postgresflexalpha/database/datasources_gen/database_data_source_gen.go" + RunE: func(_ *cobra.Command, _ []string) error { + // filePathStr := "stackit/internal/services/postgresflexalpha/database/datasources_gen/database_data_source_gen.go" // - //src, err := os.ReadFile(filePathStr) - //if err != nil { + // src, err := os.ReadFile(filePathStr) + // if err != nil { // return err //} // - //i := interp.New( + // i := interp.New( // interp.Options{ // GoPath: "/home/henselinm/.asdf/installs/golang/1.25.6/packages", // BuildTags: nil, @@ -34,46 +33,46 @@ var examplesCmd = &cobra.Command{ // Unrestricted: false, // }, //) - //err = i.Use(i.Symbols("github.com/hashicorp/terraform-plugin-framework-validators")) - //if err != nil { + // err = i.Use(i.Symbols("github.com/hashicorp/terraform-plugin-framework-validators")) + // if err != nil { // return err //} - //err = i.Use(stdlib.Symbols) - //if err != nil { + // err = i.Use(stdlib.Symbols) + // if err != nil { // return err //} - //_, err = i.Eval(string(src)) - //if err != nil { + // _, err = i.Eval(string(src)) + // if err != nil { // return err //} // - //v, err := i.Eval("DatabaseDataSourceSchema") - //if err != nil { + // v, err := i.Eval("DatabaseDataSourceSchema") + // if err != nil { // return err //} // - //bar := v.Interface().(func(string) string) + // bar := v.Interface().(func(string) string) // - //r := bar("Kung") - //println(r) + // r := bar("Kung") + // println(r) // - //evalPath, err := i.EvalPath(filePathStr) - //if err != nil { + // evalPath, err := i.EvalPath(filePathStr) + // if err != nil { // return err //} // - //fmt.Printf("%+v\n", evalPath) + // fmt.Printf("%+v\n", evalPath) - //_, err = i.Eval(`import "fmt"`) - //if err != nil { + // _, err = i.Eval(`import "fmt"`) + // if err != nil { // return err //} - //_, err = i.Eval(`func Hallo() { fmt.Println("Hi!") }`) - //if err != nil { + // _, err = i.Eval(`func Hallo() { fmt.Println("Hi!") }`) + // if err != nil { // return err //} - //v = i.Symbols("Hallo") + // v = i.Symbols("Hallo") // fmt.Println(v) return workServices() @@ -110,6 +109,6 @@ func NewExamplesCmd() *cobra.Command { return examplesCmd } -//func init() { // nolint: gochecknoinits +// func init() { // nolint: gochecknoinits // examplesCmd.Flags().BoolVarP(&example, "example", "e", false, "example") //} diff --git a/cmd/cmd/getFieldsCmd.go b/cmd/cmd/getFieldsCmd.go index 48cb379a..06fe9e66 100644 --- a/cmd/cmd/getFieldsCmd.go +++ b/cmd/cmd/getFieldsCmd.go @@ -24,7 +24,7 @@ var getFieldsCmd = &cobra.Command{ Use: "get-fields", Short: "get fields from file", Long: `...`, - PreRunE: func(cmd *cobra.Command, args []string) error { + PreRunE: func(_ *cobra.Command, _ []string) error { typeStr := "data_source" if resType != "resource" && resType != "datasource" { return fmt.Errorf("--type can only be resource or datasource") @@ -76,13 +76,13 @@ var getFieldsCmd = &cobra.Command{ //// Enum check // switch format { - //case "json", "yaml": + // case "json", "yaml": //default: // return fmt.Errorf("invalid --format: %s (want json|yaml)", format) //} return nil }, - RunE: func(cmd *cobra.Command, args []string) error { + RunE: func(_ *cobra.Command, _ []string) error { return getFields(filePath) }, } @@ -107,31 +107,26 @@ func getTokens(fileName string) ([]string, error) { return nil, err } - ast.Inspect(node, func(n ast.Node) bool { - // Suche nach Typ-Deklarationen (structs) - ts, ok := n.(*ast.TypeSpec) - if ok { - if strings.Contains(ts.Name.Name, "Model") { - // fmt.Printf("found model: %s\n", ts.Name.Name) - ast.Inspect(ts, func(sn ast.Node) bool { - tts, tok := sn.(*ast.Field) - if tok { - // fmt.Printf(" found: %+v\n", tts.Names[0]) - // spew.Dump(tts.Type) - - result = append(result, tts.Names[0].String()) - - // fld, fldOk := tts.Type.(*ast.Ident) - //if fldOk { - // fmt.Printf("type: %+v\n", fld) - //} - } - return true - }) + ast.Inspect( + node, func(n ast.Node) bool { + // Suche nach Typ-Deklarationen (structs) + ts, ok := n.(*ast.TypeSpec) + if ok { + if strings.Contains(ts.Name.Name, "Model") { + ast.Inspect( + ts, func(sn ast.Node) bool { + tts, tok := sn.(*ast.Field) + if tok { + result = append(result, tts.Names[0].String()) + } + return true + }, + ) + } } - } - return true - }) + return true + }, + ) return result, nil } @@ -139,9 +134,15 @@ func NewGetFieldsCmd() *cobra.Command { return getFieldsCmd } -func init() { // nolint: gochecknoinits +func init() { //nolint:gochecknoinits //this is the only way to add the command to the rootCmd getFieldsCmd.Flags().StringVarP(&inFile, "infile", "i", "", "input filename incl path") getFieldsCmd.Flags().StringVarP(&svcName, "service", "s", "", "service name") getFieldsCmd.Flags().StringVarP(&resName, "resource", "r", "", "resource name") - getFieldsCmd.Flags().StringVarP(&resType, "type", "t", "resource", "resource type (data-source or resource [default])") + getFieldsCmd.Flags().StringVarP( + &resType, + "type", + "t", + "resource", + "resource type (data-source or resource [default])", + ) } diff --git a/cmd/cmd/publish/architecture.go b/cmd/cmd/publish/architecture.go index 5fffa585..7316a03d 100644 --- a/cmd/cmd/publish/architecture.go +++ b/cmd/cmd/publish/architecture.go @@ -35,36 +35,27 @@ type GpgPublicKey struct { } func (p *Provider) CreateArchitectureFiles() error { - // var namespace, provider, distPath, repoName, version, gpgFingerprint, gpgPubKeyFile, domain string - log.Println("* Creating architecture files in target directories") - // filename = terraform-provider-[provider]_0.0.1_darwin_amd64.zip - provider_name + version + target + architecture + .zip - // prefix := fmt.Sprintf("v1/providers/%s/%s/%s/", namespace, provider, version) prefix := path.Join("v1", "providers", p.Namespace, p.Provider, p.Version) - // pathPrefix := fmt.Sprintf("release/%s", prefix) pathPrefix := path.Join("release", prefix) - // urlPrefix := fmt.Sprintf("https://%s/%s", domain, prefix) urlPrefix, err := url.JoinPath("https://", p.Domain, prefix) if err != nil { return fmt.Errorf("error creating base url: %w", err) } - // download url = https://example.com/v1/providers/namespace/provider/0.0.1/download/terraform-provider_0.0.1_darwin_amd64.zip downloadUrlPrefix, err := url.JoinPath(urlPrefix, "download") if err != nil { return fmt.Errorf("error crearting download url: %w", err) } downloadPathPrefix := path.Join(pathPrefix, "download") - // shasums url = https://example.com/v1/providers/namespace/provider/0.0.1/terraform-provider_0.0.1_SHA256SUMS shasumsUrl, err := url.JoinPath(urlPrefix, fmt.Sprintf("%s_%s_SHA256SUMS", p.RepoName, p.Version)) if err != nil { return fmt.Errorf("error creating shasums url: %w", err) } - // shasums_signature_url = https://example.com/v1/providers/namespace/provider/0.0.1/terraform-provider_0.0.1_SHA256SUMS.sig shasumsSigUrl := shasumsUrl + ".sig" gpgAsciiPub, err := p.ReadGpgFile() @@ -116,33 +107,6 @@ func (p *Provider) CreateArchitectureFiles() error { }, }, } - // var architectureTemplate = []byte(fmt.Sprintf(` - //{ - // "protocols": [ - // "4.0", - // "5.1", - // "6.0" - // ], - // "os": "%s", - // "arch": "%s", - // "filename": "%s", - // "download_url": "%s", - // "shasums_url": "%s", - // "shasums_signature_url": "%s", - // "shasum": "%s", - // "signing_keys": { - // "gpg_public_keys": [ - // { - // "key_id": "%s", - // "ascii_armor": "%s", - // "trust_signature": "", - // "source": "", - // "source_url": "" - // } - // ] - // } - //} - // `, target, arch, fileName, downloadUrl, shasumsUrl, shasumsSigUrl, shasum, gpgFingerprint, gpgAsciiPub)) log.Printf(" - Arch file: %s", archFileName) @@ -160,8 +124,12 @@ func WriteArchitectureFile(filePath string, arch Architecture) error { if err != nil { return fmt.Errorf("error encoding data: %w", err) } - - err = os.WriteFile(filePath, jsonString, os.ModePerm) + //nolint:gosec // this file is not sensitive, so we can use os.ModePerm + err = os.WriteFile( + filePath, + jsonString, + os.ModePerm, + ) if err != nil { return fmt.Errorf("error writing data: %w", err) } diff --git a/cmd/cmd/publish/provider.go b/cmd/cmd/publish/provider.go index 36d7af1d..88849eb0 100644 --- a/cmd/cmd/publish/provider.go +++ b/cmd/cmd/publish/provider.go @@ -161,10 +161,12 @@ func (p *Provider) createVersionsFile() error { target := fileNameSplit[2] arch := fileNameSplit[3] - version.Platforms = append(version.Platforms, Platform{ - OS: target, - Arch: arch, - }) + version.Platforms = append( + version.Platforms, Platform{ + OS: target, + Arch: arch, + }, + ) } data := Data{} @@ -206,16 +208,19 @@ func (p *Provider) CreateWellKnown() error { log.Println("* Creating .well-known directory") pathString := path.Join(p.RootPath, "release", ".well-known") + //nolint:gosec // this file is not sensitive, so we can use ModePerm err := os.MkdirAll(pathString, os.ModePerm) if err != nil && !errors.Is(err, fs.ErrExist) { return fmt.Errorf("error creating '%s' dir: %w", pathString, err) } log.Println(" - Writing to .well-known/terraform.json file") + + //nolint:gosec // this file is not sensitive, so we can use 0644 err = os.WriteFile( fmt.Sprintf("%s/terraform.json", pathString), []byte(`{"providers.v1": "/v1/providers/"}`), - 0644, + 0o644, ) if err != nil { return err @@ -224,9 +229,10 @@ func (p *Provider) CreateWellKnown() error { return nil } -func CreateDir(path string) error { - log.Printf("* Creating %s directory", path) - err := os.MkdirAll(path, os.ModePerm) +func CreateDir(pathValue string) error { + log.Printf("* Creating %s directory", pathValue) + //nolint:gosec // this file is not sensitive, so we can use ModePerm + err := os.MkdirAll(pathValue, os.ModePerm) if errors.Is(err, fs.ErrExist) { return nil } @@ -269,13 +275,23 @@ func CopyFile(src, dst string) (int64, error) { if err != nil { return 0, err } - defer source.Close() + defer func(source *os.File) { + err := source.Close() + if err != nil { + slog.Error("error closing source file", slog.Any("err", err)) + } + }(source) destination, err := os.Create(dst) if err != nil { return 0, err } - defer destination.Close() + defer func(destination *os.File) { + err := destination.Close() + if err != nil { + slog.Error("error closing destination file", slog.Any("err", err)) + } + }(destination) nBytes, err := io.Copy(destination, source) return nBytes, err } diff --git a/cmd/cmd/publish/versions.go b/cmd/cmd/publish/versions.go index 397afa15..5f75d45d 100644 --- a/cmd/cmd/publish/versions.go +++ b/cmd/cmd/publish/versions.go @@ -35,7 +35,12 @@ func (d *Data) WriteToFile(filePath string) error { return fmt.Errorf("error encoding data: %w", err) } - err = os.WriteFile(filePath, jsonString, os.ModePerm) + //nolint:gosec // this file is not sensitive, so we can use os.ModePerm + err = os.WriteFile( + filePath, + jsonString, + os.ModePerm, + ) if err != nil { return fmt.Errorf("error writing data: %w", err) } @@ -86,7 +91,13 @@ func (d *Data) LoadFromUrl(uri string) error { if err != nil { return err } - defer os.Remove(file.Name()) // Clean up + defer func(name string) { + //nolint:gosec // The file path is generated by os.CreateTemp and is not user-controllable + err := os.Remove(name) + if err != nil { + slog.Error("failed to remove temporary file", slog.Any("err", err)) + } + }(file.Name()) // Clean up err = DownloadFile( u.String(), @@ -123,20 +134,30 @@ func (v *Version) AddProtocol(p string) error { // DownloadFile will download a url and store it in local filepath. // It writes to the destination file as it downloads it, without // loading the entire file into memory. -func DownloadFile(url string, filepath string) error { +func DownloadFile(urlValue, filepath string) error { // Create the file + //nolint:gosec // path traversal is not a concern here, as the filepath is generated by us and not user input out, err := os.Create(filepath) if err != nil { return err } - defer out.Close() + defer func(out *os.File) { + err := out.Close() + if err != nil { + slog.Error("failed to close file", slog.Any("err", err)) + } + }(out) // Get the data - resp, err := http.Get(url) + + //nolint:gosec,bodyclose // this is a controlled URL, not user input + resp, err := http.Get(urlValue) if err != nil { return err } - defer resp.Body.Close() + defer func(Body io.ReadCloser) { + _ = Body.Close() + }(resp.Body) // Write the body to file _, err = io.Copy(out, resp.Body) diff --git a/cmd/cmd/publishCmd.go b/cmd/cmd/publishCmd.go index 4849ba4b..a428d436 100644 --- a/cmd/cmd/publishCmd.go +++ b/cmd/cmd/publishCmd.go @@ -29,20 +29,32 @@ var publishCmd = &cobra.Command{ Use: "publish", Short: "Publish terraform provider", Long: `...`, - RunE: func(_ *cobra.Command, args []string) error { + RunE: func(_ *cobra.Command, _ []string) error { return publish() }, } -func init() { // nolint: gochecknoinits +func init() { //nolint:gochecknoinits //this is the standard way to set up cobra commands publishCmd.Flags().StringVarP(&namespace, "namespace", "n", "", "Namespace for the Terraform registry.") publishCmd.Flags().StringVarP(&domain, "domain", "d", "", "Domain for the Terraform registry.") publishCmd.Flags().StringVarP(&providerName, "providerName", "p", "", "ProviderName for the Terraform registry.") publishCmd.Flags().StringVarP(&distPath, "distPath", "x", "dist", "Dist Path for the Terraform registry.") publishCmd.Flags().StringVarP(&repoName, "repoName", "r", "", "RepoName for the Terraform registry.") publishCmd.Flags().StringVarP(&version, "version", "v", "", "Version for the Terraform registry.") - publishCmd.Flags().StringVarP(&gpgFingerprint, "gpgFingerprint", "f", "", "GPG Fingerprint for the Terraform registry.") - publishCmd.Flags().StringVarP(&gpgPubKeyFile, "gpgPubKeyFile", "k", "", "GPG PubKey file name for the Terraform registry.") + publishCmd.Flags().StringVarP( + &gpgFingerprint, + "gpgFingerprint", + "f", + "", + "GPG Fingerprint for the Terraform registry.", + ) + publishCmd.Flags().StringVarP( + &gpgPubKeyFile, + "gpgPubKeyFile", + "k", + "", + "GPG PubKey file name for the Terraform registry.", + ) err := publishCmd.MarkFlagRequired("namespace") if err != nil { @@ -105,6 +117,7 @@ func publish() error { // Create release dir - only the contents of this need to be uploaded to S3 log.Printf("* Creating release directory") + //nolint:gosec // this directory is not sensitive, so we can use 0750 err = os.MkdirAll(path.Join(p.RootPath, "release"), os.ModePerm) if err != nil && !errors.Is(err, fs.ErrExist) { return fmt.Errorf("error creating '%s' dir: %w", path.Join(p.RootPath, "release"), err) diff --git a/docs/data-sources/postgresflexalpha_database.md b/docs/data-sources/postgresflexalpha_database.md deleted file mode 100644 index 95c115e3..00000000 --- a/docs/data-sources/postgresflexalpha_database.md +++ /dev/null @@ -1,38 +0,0 @@ ---- -# generated by https://github.com/hashicorp/terraform-plugin-docs -page_title: "stackitprivatepreview_postgresflexalpha_database Data Source - stackitprivatepreview" -subcategory: "" -description: |- - ---- - -# stackitprivatepreview_postgresflexalpha_database (Data Source) - - - -## Example Usage - -```terraform -data "stackitprivatepreview_postgresflexalpha_database" "example" { - project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" - instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" - database_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -} -``` - - -## Schema - -### Required - -- `database_id` (Number) The ID of the database. -- `instance_id` (String) The ID of the instance. -- `project_id` (String) The STACKIT project ID. -- `region` (String) The region which should be addressed - -### Read-Only - -- `id` (String) Terraform's internal resource ID. It is structured as \"`project_id`,`region`,`instance_id`,`database_id`\".", -- `name` (String) The name of the database. -- `owner` (String) The owner of the database. -- `tf_original_api_id` (Number) The id of the database. diff --git a/docs/data-sources/postgresflexalpha_flavor.md b/docs/data-sources/postgresflexalpha_flavor.md deleted file mode 100644 index 24c79829..00000000 --- a/docs/data-sources/postgresflexalpha_flavor.md +++ /dev/null @@ -1,54 +0,0 @@ ---- -# generated by https://github.com/hashicorp/terraform-plugin-docs -page_title: "stackitprivatepreview_postgresflexalpha_flavor Data Source - stackitprivatepreview" -subcategory: "" -description: |- - ---- - -# stackitprivatepreview_postgresflexalpha_flavor (Data Source) - - - -## Example Usage - -```terraform -data "stackitprivatepreview_postgresflexalpha_flavor" "flavor" { - project_id = var.project_id - region = var.region - cpu = 4 - ram = 16 - node_type = "Single" - storage_class = "premium-perf2-stackit" -} -``` - - -## Schema - -### Required - -- `cpu` (Number) The cpu count of the instance. -- `node_type` (String) defines the nodeType it can be either single or replica -- `project_id` (String) The cpu count of the instance. -- `ram` (Number) The memory of the instance in Gibibyte. -- `region` (String) The flavor description. -- `storage_class` (String) The memory of the instance in Gibibyte. - -### Read-Only - -- `description` (String) The flavor description. -- `flavor_id` (String) The flavor id of the instance flavor. -- `id` (String) The terraform id of the instance flavor. -- `max_gb` (Number) maximum storage which can be ordered for the flavor in Gigabyte. -- `min_gb` (Number) minimum storage which is required to order in Gigabyte. -- `storage_classes` (Attributes List) (see [below for nested schema](#nestedatt--storage_classes)) - - -### Nested Schema for `storage_classes` - -Read-Only: - -- `class` (String) -- `max_io_per_sec` (Number) -- `max_through_in_mb` (Number) diff --git a/docs/data-sources/postgresflexalpha_flavors.md b/docs/data-sources/postgresflexalpha_flavors.md deleted file mode 100644 index 06645bb4..00000000 --- a/docs/data-sources/postgresflexalpha_flavors.md +++ /dev/null @@ -1,68 +0,0 @@ ---- -# generated by https://github.com/hashicorp/terraform-plugin-docs -page_title: "stackitprivatepreview_postgresflexalpha_flavors Data Source - stackitprivatepreview" -subcategory: "" -description: |- - ---- - -# stackitprivatepreview_postgresflexalpha_flavors (Data Source) - - - - - - -## Schema - -### Required - -- `project_id` (String) The STACKIT project ID. -- `region` (String) The region which should be addressed - -### Optional - -- `page` (Number) Number of the page of items list to be returned. -- `size` (Number) Number of items to be returned on each page. -- `sort` (String) Sorting of the flavors to be returned on each page. - -### Read-Only - -- `flavors` (Attributes List) List of flavors available for the project. (see [below for nested schema](#nestedatt--flavors)) -- `pagination` (Attributes) (see [below for nested schema](#nestedatt--pagination)) - - -### Nested Schema for `flavors` - -Read-Only: - -- `cpu` (Number) The cpu count of the instance. -- `description` (String) The flavor description. -- `max_gb` (Number) maximum storage which can be ordered for the flavor in Gigabyte. -- `memory` (Number) The memory of the instance in Gibibyte. -- `min_gb` (Number) minimum storage which is required to order in Gigabyte. -- `node_type` (String) defines the nodeType it can be either single or replica -- `storage_classes` (Attributes List) maximum storage which can be ordered for the flavor in Gigabyte. (see [below for nested schema](#nestedatt--flavors--storage_classes)) -- `tf_original_api_id` (String) The id of the instance flavor. - - -### Nested Schema for `flavors.storage_classes` - -Read-Only: - -- `class` (String) -- `max_io_per_sec` (Number) -- `max_through_in_mb` (Number) - - - - -### Nested Schema for `pagination` - -Read-Only: - -- `page` (Number) -- `size` (Number) -- `sort` (String) -- `total_pages` (Number) -- `total_rows` (Number) diff --git a/docs/data-sources/postgresflexalpha_instance.md b/docs/data-sources/postgresflexalpha_instance.md deleted file mode 100644 index 466745a6..00000000 --- a/docs/data-sources/postgresflexalpha_instance.md +++ /dev/null @@ -1,87 +0,0 @@ ---- -# generated by https://github.com/hashicorp/terraform-plugin-docs -page_title: "stackitprivatepreview_postgresflexalpha_instance Data Source - stackitprivatepreview" -subcategory: "" -description: |- - ---- - -# stackitprivatepreview_postgresflexalpha_instance (Data Source) - - - -## Example Usage - -```terraform -data "stackitprivatepreview_postgresflexalpha_instance" "example" { - project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" - instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -} -``` - - -## Schema - -### Required - -- `instance_id` (String) The ID of the instance. -- `project_id` (String) The STACKIT project ID. -- `region` (String) The region which should be addressed - -### Read-Only - -- `acl` (List of String) List of IPV4 cidr. -- `backup_schedule` (String) The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule. -- `connection_info` (Attributes) The DNS name and port in the instance overview (see [below for nested schema](#nestedatt--connection_info)) -- `encryption` (Attributes) The configuration for instance's volume and backup storage encryption. - -⚠︝ **Note:** This feature is in private preview. Supplying this object is only permitted for enabled accounts. If your account does not have access, the request will be rejected. (see [below for nested schema](#nestedatt--encryption)) -- `flavor_id` (String) The id of the instance flavor. -- `is_deletable` (Boolean) Whether the instance can be deleted or not. -- `name` (String) The name of the instance. -- `network` (Attributes) The access configuration of the instance (see [below for nested schema](#nestedatt--network)) -- `replicas` (Number) How many replicas the instance should have. -- `retention_days` (Number) How long backups are retained. The value can only be between 32 and 365 days. -- `status` (String) The current status of the instance. -- `storage` (Attributes) The object containing information about the storage size and class. (see [below for nested schema](#nestedatt--storage)) -- `tf_original_api_id` (String) The ID of the instance. -- `version` (String) The Postgres version used for the instance. See [Versions Endpoint](/documentation/postgres-flex-service/version/v3alpha1#tag/Version) for supported version parameters. - - -### Nested Schema for `connection_info` - -Read-Only: - -- `host` (String) The host of the instance. -- `port` (Number) The port of the instance. - - - -### Nested Schema for `encryption` - -Read-Only: - -- `kek_key_id` (String) The encryption-key key identifier -- `kek_key_ring_id` (String) The encryption-key keyring identifier -- `kek_key_version` (String) The encryption-key version -- `service_account` (String) - - - -### Nested Schema for `network` - -Read-Only: - -- `access_scope` (String) The access scope of the instance. It defines if the instance is public or airgapped. -- `acl` (List of String) List of IPV4 cidr. -- `instance_address` (String) -- `router_address` (String) - - - -### Nested Schema for `storage` - -Read-Only: - -- `performance_class` (String) The storage class for the storage. -- `size` (Number) The storage size in Gigabytes. diff --git a/docs/data-sources/postgresflexalpha_user.md b/docs/data-sources/postgresflexalpha_user.md deleted file mode 100644 index c3553c7b..00000000 --- a/docs/data-sources/postgresflexalpha_user.md +++ /dev/null @@ -1,42 +0,0 @@ ---- -# generated by https://github.com/hashicorp/terraform-plugin-docs -page_title: "stackitprivatepreview_postgresflexalpha_user Data Source - stackitprivatepreview" -subcategory: "" -description: |- - ---- - -# stackitprivatepreview_postgresflexalpha_user (Data Source) - - - -## Example Usage - -```terraform -data "stackitprivatepreview_postgresflexalpha_user" "example" { - project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" - instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" - user_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -} -``` - - -## Schema - -### Required - -- `instance_id` (String) The ID of the instance. -- `project_id` (String) The STACKIT project ID. -- `region` (String) The region which should be addressed -- `user_id` (Number) The ID of the user. - -### Optional - -- `id` (String) Terraform's internal resource ID. It is structured as \"`project_id`,`region`,`instance_id`,`user_id`\".", - -### Read-Only - -- `name` (String) The name of the user. -- `roles` (List of String) A list of user roles. -- `status` (String) The current status of the user. -- `tf_original_api_id` (Number) The ID of the user. diff --git a/docs/data-sources/sqlserverflexalpha_database.md b/docs/data-sources/sqlserverflexalpha_database.md deleted file mode 100644 index df66ffb7..00000000 --- a/docs/data-sources/sqlserverflexalpha_database.md +++ /dev/null @@ -1,32 +0,0 @@ ---- -# generated by https://github.com/hashicorp/terraform-plugin-docs -page_title: "stackitprivatepreview_sqlserverflexalpha_database Data Source - stackitprivatepreview" -subcategory: "" -description: |- - ---- - -# stackitprivatepreview_sqlserverflexalpha_database (Data Source) - - - - - - -## Schema - -### Required - -- `database_name` (String) The name of the database. -- `instance_id` (String) The ID of the instance. -- `project_id` (String) The STACKIT project ID. -- `region` (String) The region which should be addressed - -### Read-Only - -- `collation_name` (String) The collation of the database. This database collation should match the *collation_name* of one of the collations given by the **Get database collation list** endpoint. -- `compatibility_level` (Number) CompatibilityLevel of the Database. -- `id` (String) The terraform internal identifier. -- `name` (String) The name of the database. -- `owner` (String) The owner of the database. -- `tf_original_api_id` (Number) The id of the database. diff --git a/docs/data-sources/sqlserverflexalpha_flavor.md b/docs/data-sources/sqlserverflexalpha_flavor.md deleted file mode 100644 index 7a03ecfb..00000000 --- a/docs/data-sources/sqlserverflexalpha_flavor.md +++ /dev/null @@ -1,54 +0,0 @@ ---- -# generated by https://github.com/hashicorp/terraform-plugin-docs -page_title: "stackitprivatepreview_sqlserverflexalpha_flavor Data Source - stackitprivatepreview" -subcategory: "" -description: |- - ---- - -# stackitprivatepreview_sqlserverflexalpha_flavor (Data Source) - - - -## Example Usage - -```terraform -data "stackitprivatepreview_sqlserverflexalpha_flavor" "flavor" { - project_id = var.project_id - region = var.region - cpu = 4 - ram = 16 - node_type = "Single" - storage_class = "premium-perf2-stackit" -} -``` - - -## Schema - -### Required - -- `cpu` (Number) The cpu count of the instance. -- `node_type` (String) defines the nodeType it can be either single or HA -- `project_id` (String) The project ID of the flavor. -- `ram` (Number) The memory of the instance in Gibibyte. -- `region` (String) The region of the flavor. -- `storage_class` (String) The memory of the instance in Gibibyte. - -### Read-Only - -- `description` (String) The flavor description. -- `flavor_id` (String) The id of the instance flavor. -- `id` (String) The id of the instance flavor. -- `max_gb` (Number) maximum storage which can be ordered for the flavor in Gigabyte. -- `min_gb` (Number) minimum storage which is required to order in Gigabyte. -- `storage_classes` (Attributes List) maximum storage which can be ordered for the flavor in Gigabyte. (see [below for nested schema](#nestedatt--storage_classes)) - - -### Nested Schema for `storage_classes` - -Read-Only: - -- `class` (String) -- `max_io_per_sec` (Number) -- `max_through_in_mb` (Number) diff --git a/docs/data-sources/sqlserverflexalpha_instance.md b/docs/data-sources/sqlserverflexalpha_instance.md deleted file mode 100644 index b05d7b8e..00000000 --- a/docs/data-sources/sqlserverflexalpha_instance.md +++ /dev/null @@ -1,77 +0,0 @@ ---- -# generated by https://github.com/hashicorp/terraform-plugin-docs -page_title: "stackitprivatepreview_sqlserverflexalpha_instance Data Source - stackitprivatepreview" -subcategory: "" -description: |- - ---- - -# stackitprivatepreview_sqlserverflexalpha_instance (Data Source) - - - -## Example Usage - -```terraform -data "stackitprivatepreview_sqlserverflexalpha_instance" "example" { - project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" - instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -} -``` - - -## Schema - -### Required - -- `instance_id` (String) The ID of the instance. -- `project_id` (String) The STACKIT project ID. -- `region` (String) The region which should be addressed - -### Read-Only - -- `backup_schedule` (String) The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule. -- `edition` (String) Edition of the MSSQL server instance -- `encryption` (Attributes) this defines which key to use for storage encryption (see [below for nested schema](#nestedatt--encryption)) -- `flavor_id` (String) The id of the instance flavor. -- `is_deletable` (Boolean) Whether the instance can be deleted or not. -- `name` (String) The name of the instance. -- `network` (Attributes) The access configuration of the instance (see [below for nested schema](#nestedatt--network)) -- `replicas` (Number) How many replicas the instance should have. -- `retention_days` (Number) The days for how long the backup files should be stored before cleaned up. 30 to 365 -- `status` (String) -- `storage` (Attributes) The object containing information about the storage size and class. (see [below for nested schema](#nestedatt--storage)) -- `tf_original_api_id` (String) The ID of the instance. -- `version` (String) The sqlserver version used for the instance. - - -### Nested Schema for `encryption` - -Read-Only: - -- `kek_key_id` (String) The key identifier -- `kek_key_ring_id` (String) The keyring identifier -- `kek_key_version` (String) The key version -- `service_account` (String) - - - -### Nested Schema for `network` - -Read-Only: - -- `access_scope` (String) The network access scope of the instance - -⚠️ **Note:** This feature is in private preview. Supplying this object is only permitted for enabled accounts. If your account does not have access, the request will be rejected. -- `acl` (List of String) List of IPV4 cidr. -- `instance_address` (String) -- `router_address` (String) - - - -### Nested Schema for `storage` - -Read-Only: - -- `class` (String) The storage class for the storage. -- `size` (Number) The storage size in Gigabytes. diff --git a/docs/data-sources/sqlserverflexalpha_user.md b/docs/data-sources/sqlserverflexalpha_user.md deleted file mode 100644 index 63526135..00000000 --- a/docs/data-sources/sqlserverflexalpha_user.md +++ /dev/null @@ -1,62 +0,0 @@ ---- -# generated by https://github.com/hashicorp/terraform-plugin-docs -page_title: "stackitprivatepreview_sqlserverflexalpha_user Data Source - stackitprivatepreview" -subcategory: "" -description: |- - ---- - -# stackitprivatepreview_sqlserverflexalpha_user (Data Source) - - - -## Example Usage - -```terraform -data "stackitprivatepreview_sqlserverflexalpha_user" "example" { - project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" - instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" - user_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -} -``` - - -## Schema - -### Required - -- `instance_id` (String) The ID of the instance. -- `project_id` (String) The STACKIT project ID. -- `region` (String) The region which should be addressed - -### Optional - -- `page` (Number) Number of the page of items list to be returned. -- `size` (Number) Number of items to be returned on each page. -- `sort` (String) Sorting of the users to be returned on each page. - -### Read-Only - -- `pagination` (Attributes) (see [below for nested schema](#nestedatt--pagination)) -- `users` (Attributes List) List of all users inside an instance (see [below for nested schema](#nestedatt--users)) - - -### Nested Schema for `pagination` - -Read-Only: - -- `page` (Number) -- `size` (Number) -- `sort` (String) -- `total_pages` (Number) -- `total_rows` (Number) - - - -### Nested Schema for `users` - -Read-Only: - -- `status` (String) The current status of the user. -- `tf_original_api_id` (Number) The ID of the user. -- `username` (String) The name of the user. diff --git a/docs/data-sources/sqlserverflexbeta_database.md b/docs/data-sources/sqlserverflexbeta_database.md deleted file mode 100644 index 9322049f..00000000 --- a/docs/data-sources/sqlserverflexbeta_database.md +++ /dev/null @@ -1,40 +0,0 @@ ---- -# generated by https://github.com/hashicorp/terraform-plugin-docs -page_title: "stackitprivatepreview_sqlserverflexbeta_database Data Source - stackitprivatepreview" -subcategory: "" -description: |- - ---- - -# stackitprivatepreview_sqlserverflexbeta_database (Data Source) - - - -## Example Usage - -```terraform -data "stackitprivatepreview_sqlserverflexbeta_database" "example" { - project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" - instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" - database_name = "dbname" -} -``` - - -## Schema - -### Required - -- `database_name` (String) The name of the database. -- `instance_id` (String) The ID of the instance. -- `project_id` (String) The STACKIT project ID. -- `region` (String) The region which should be addressed - -### Read-Only - -- `collation_name` (String) The collation of the database. This database collation should match the *collation_name* of one of the collations given by the **Get database collation list** endpoint. -- `compatibility_level` (Number) CompatibilityLevel of the Database. -- `id` (String) The terraform internal identifier. -- `name` (String) The name of the database. -- `owner` (String) The owner of the database. -- `tf_original_api_id` (Number) The id of the database. diff --git a/docs/data-sources/sqlserverflexbeta_flavor.md b/docs/data-sources/sqlserverflexbeta_flavor.md deleted file mode 100644 index 4d2a32f3..00000000 --- a/docs/data-sources/sqlserverflexbeta_flavor.md +++ /dev/null @@ -1,54 +0,0 @@ ---- -# generated by https://github.com/hashicorp/terraform-plugin-docs -page_title: "stackitprivatepreview_sqlserverflexbeta_flavor Data Source - stackitprivatepreview" -subcategory: "" -description: |- - ---- - -# stackitprivatepreview_sqlserverflexbeta_flavor (Data Source) - - - -## Example Usage - -```terraform -data "stackitprivatepreview_sqlserverflexbeta_flavor" "flavor" { - project_id = var.project_id - region = var.region - cpu = 4 - ram = 16 - node_type = "Single" - storage_class = "premium-perf2-stackit" -} -``` - - -## Schema - -### Required - -- `cpu` (Number) The cpu count of the instance. -- `node_type` (String) defines the nodeType it can be either single or HA -- `project_id` (String) The project ID of the flavor. -- `ram` (Number) The memory of the instance in Gibibyte. -- `region` (String) The region of the flavor. -- `storage_class` (String) The memory of the instance in Gibibyte. - -### Read-Only - -- `description` (String) The flavor description. -- `flavor_id` (String) The id of the instance flavor. -- `id` (String) The id of the instance flavor. -- `max_gb` (Number) maximum storage which can be ordered for the flavor in Gigabyte. -- `min_gb` (Number) minimum storage which is required to order in Gigabyte. -- `storage_classes` (Attributes List) maximum storage which can be ordered for the flavor in Gigabyte. (see [below for nested schema](#nestedatt--storage_classes)) - - -### Nested Schema for `storage_classes` - -Read-Only: - -- `class` (String) -- `max_io_per_sec` (Number) -- `max_through_in_mb` (Number) diff --git a/docs/data-sources/sqlserverflexbeta_instance.md b/docs/data-sources/sqlserverflexbeta_instance.md deleted file mode 100644 index 431f95f1..00000000 --- a/docs/data-sources/sqlserverflexbeta_instance.md +++ /dev/null @@ -1,77 +0,0 @@ ---- -# generated by https://github.com/hashicorp/terraform-plugin-docs -page_title: "stackitprivatepreview_sqlserverflexbeta_instance Data Source - stackitprivatepreview" -subcategory: "" -description: |- - ---- - -# stackitprivatepreview_sqlserverflexbeta_instance (Data Source) - - - -## Example Usage - -```terraform -data "stackitprivatepreview_sqlserverflexbeta_instance" "example" { - project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" - instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -} -``` - - -## Schema - -### Required - -- `instance_id` (String) The ID of the instance. -- `project_id` (String) The STACKIT project ID. -- `region` (String) The region which should be addressed - -### Read-Only - -- `backup_schedule` (String) The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule. -- `edition` (String) Edition of the MSSQL server instance -- `encryption` (Attributes) this defines which key to use for storage encryption (see [below for nested schema](#nestedatt--encryption)) -- `flavor_id` (String) The id of the instance flavor. -- `is_deletable` (Boolean) Whether the instance can be deleted or not. -- `name` (String) The name of the instance. -- `network` (Attributes) The access configuration of the instance (see [below for nested schema](#nestedatt--network)) -- `replicas` (Number) How many replicas the instance should have. -- `retention_days` (Number) The days for how long the backup files should be stored before cleaned up. 30 to 365 -- `status` (String) -- `storage` (Attributes) The object containing information about the storage size and class. (see [below for nested schema](#nestedatt--storage)) -- `tf_original_api_id` (String) The ID of the instance. -- `version` (String) The sqlserver version used for the instance. - - -### Nested Schema for `encryption` - -Read-Only: - -- `kek_key_id` (String) The key identifier -- `kek_key_ring_id` (String) The keyring identifier -- `kek_key_version` (String) The key version -- `service_account` (String) - - - -### Nested Schema for `network` - -Read-Only: - -- `access_scope` (String) The network access scope of the instance - -⚠️ **Note:** This feature is in private preview. Supplying this object is only permitted for enabled accounts. If your account does not have access, the request will be rejected. -- `acl` (List of String) List of IPV4 cidr. -- `instance_address` (String) -- `router_address` (String) - - - -### Nested Schema for `storage` - -Read-Only: - -- `class` (String) The storage class for the storage. -- `size` (Number) The storage size in Gigabytes. diff --git a/docs/data-sources/sqlserverflexbeta_user.md b/docs/data-sources/sqlserverflexbeta_user.md deleted file mode 100644 index f87f454e..00000000 --- a/docs/data-sources/sqlserverflexbeta_user.md +++ /dev/null @@ -1,54 +0,0 @@ ---- -# generated by https://github.com/hashicorp/terraform-plugin-docs -page_title: "stackitprivatepreview_sqlserverflexbeta_user Data Source - stackitprivatepreview" -subcategory: "" -description: |- - ---- - -# stackitprivatepreview_sqlserverflexbeta_user (Data Source) - - - - - - -## Schema - -### Required - -- `instance_id` (String) The ID of the instance. -- `project_id` (String) The STACKIT project ID. -- `region` (String) The region which should be addressed - -### Optional - -- `page` (Number) Number of the page of items list to be returned. -- `size` (Number) Number of items to be returned on each page. -- `sort` (String) Sorting of the users to be returned on each page. - -### Read-Only - -- `pagination` (Attributes) (see [below for nested schema](#nestedatt--pagination)) -- `users` (Attributes List) List of all users inside an instance (see [below for nested schema](#nestedatt--users)) - - -### Nested Schema for `pagination` - -Read-Only: - -- `page` (Number) -- `size` (Number) -- `sort` (String) -- `total_pages` (Number) -- `total_rows` (Number) - - - -### Nested Schema for `users` - -Read-Only: - -- `status` (String) The current status of the user. -- `tf_original_api_id` (Number) The ID of the user. -- `username` (String) The name of the user. diff --git a/docs/index.md b/docs/index.md deleted file mode 100644 index 84bc25b3..00000000 --- a/docs/index.md +++ /dev/null @@ -1,83 +0,0 @@ ---- -# generated by https://github.com/hashicorp/terraform-plugin-docs -page_title: "stackitprivatepreview Provider" -description: |- - ---- - -# stackitprivatepreview Provider - - - -## Example Usage - -```terraform -provider "stackitprivatepreview" { - default_region = "eu01" -} - -provider "stackitprivatepreview" { - default_region = "eu01" - service_account_key_path = "service_account.json" -} - -# Authentication - -# Key flow -provider "stackitprivatepreview" { - default_region = "eu01" - service_account_key = var.service_account_key - private_key = var.private_key -} - -# Key flow (using path) -provider "stackitprivatepreview" { - default_region = "eu01" - service_account_key_path = var.service_account_key_path - private_key_path = var.private_key_path -} -``` - - -## Schema - -### Optional - -- `authorization_custom_endpoint` (String) Custom endpoint for the Membership service -- `cdn_custom_endpoint` (String) Custom endpoint for the CDN service -- `credentials_path` (String) Path of JSON from where the credentials are read. Takes precedence over the env var `STACKIT_CREDENTIALS_PATH`. Default value is `~/.stackit/credentials.json`. -- `default_region` (String) Region will be used as the default location for regional services. Not all services require a region, some are global -- `dns_custom_endpoint` (String) Custom endpoint for the DNS service -- `enable_beta_resources` (Boolean) Enable beta resources. Default is false. -- `experiments` (List of String) Enables experiments. These are unstable features without official support. More information can be found in the README. Available Experiments: iam, routing-tables, network -- `git_custom_endpoint` (String) Custom endpoint for the Git service -- `iaas_custom_endpoint` (String) Custom endpoint for the IaaS service -- `kms_custom_endpoint` (String) Custom endpoint for the KMS service -- `loadbalancer_custom_endpoint` (String) Custom endpoint for the Load Balancer service -- `logme_custom_endpoint` (String) Custom endpoint for the LogMe service -- `mariadb_custom_endpoint` (String) Custom endpoint for the MariaDB service -- `modelserving_custom_endpoint` (String) Custom endpoint for the AI Model Serving service -- `mongodbflex_custom_endpoint` (String) Custom endpoint for the MongoDB Flex service -- `objectstorage_custom_endpoint` (String) Custom endpoint for the Object Storage service -- `observability_custom_endpoint` (String) Custom endpoint for the Observability service -- `opensearch_custom_endpoint` (String) Custom endpoint for the OpenSearch service -- `postgresflex_custom_endpoint` (String) Custom endpoint for the PostgresFlex service -- `private_key` (String) Private RSA key used for authentication, relevant for the key flow. It takes precedence over the private key that is included in the service account key. -- `private_key_path` (String) Path for the private RSA key used for authentication, relevant for the key flow. It takes precedence over the private key that is included in the service account key. -- `rabbitmq_custom_endpoint` (String) Custom endpoint for the RabbitMQ service -- `redis_custom_endpoint` (String) Custom endpoint for the Redis service -- `region` (String, Deprecated) Region will be used as the default location for regional services. Not all services require a region, some are global -- `resourcemanager_custom_endpoint` (String) Custom endpoint for the Resource Manager service -- `scf_custom_endpoint` (String) Custom endpoint for the Cloud Foundry (SCF) service -- `secretsmanager_custom_endpoint` (String) Custom endpoint for the Secrets Manager service -- `server_backup_custom_endpoint` (String) Custom endpoint for the Server Backup service -- `server_update_custom_endpoint` (String) Custom endpoint for the Server Update service -- `service_account_custom_endpoint` (String) Custom endpoint for the Service Account service -- `service_account_email` (String, Deprecated) Service account email. It can also be set using the environment variable STACKIT_SERVICE_ACCOUNT_EMAIL. It is required if you want to use the resource manager project resource. -- `service_account_key` (String) Service account key used for authentication. If set, the key flow will be used to authenticate all operations. -- `service_account_key_path` (String) Path for the service account key used for authentication. If set, the key flow will be used to authenticate all operations. -- `service_account_token` (String, Deprecated) Token used for authentication. If set, the token flow will be used to authenticate all operations. -- `service_enablement_custom_endpoint` (String) Custom endpoint for the Service Enablement API -- `ske_custom_endpoint` (String) Custom endpoint for the Kubernetes Engine (SKE) service -- `sqlserverflex_custom_endpoint` (String) Custom endpoint for the SQL Server Flex service -- `token_custom_endpoint` (String) Custom endpoint for the token API, which is used to request access tokens when using the key flow diff --git a/docs/resources/postgresflexalpha_database.md b/docs/resources/postgresflexalpha_database.md deleted file mode 100644 index 6c94fd62..00000000 --- a/docs/resources/postgresflexalpha_database.md +++ /dev/null @@ -1,57 +0,0 @@ ---- -# generated by https://github.com/hashicorp/terraform-plugin-docs -page_title: "stackitprivatepreview_postgresflexalpha_database Resource - stackitprivatepreview" -subcategory: "" -description: |- - ---- - -# stackitprivatepreview_postgresflexalpha_database (Resource) - - - -## Example Usage - -```terraform -resource "stackitprivatepreview_postgresflexalpha_database" "example" { - project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" - instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" - name = "mydb" - owner = "myusername" -} - -# Only use the import statement, if you want to import an existing postgresflex database -import { - to = stackitprivatepreview_postgresflexalpha_database.import-example - id = "${var.project_id},${var.region},${var.postgres_instance_id},${var.postgres_database_id}" -} - -import { - to = stackitprivatepreview_postgresflexalpha_database.import-example - identity = { - project_id = "project_id" - region = "region" - instance_id = "instance_id" - database_id = "database_id" - } -} -``` - - -## Schema - -### Required - -- `name` (String) The name of the database. - -### Optional - -- `database_id` (Number) The ID of the database. -- `instance_id` (String) The ID of the instance. -- `owner` (String) The owner of the database. -- `project_id` (String) The STACKIT project ID. -- `region` (String) The region which should be addressed - -### Read-Only - -- `id` (Number) The id of the database. diff --git a/docs/resources/postgresflexalpha_instance.md b/docs/resources/postgresflexalpha_instance.md deleted file mode 100644 index 3f682bb5..00000000 --- a/docs/resources/postgresflexalpha_instance.md +++ /dev/null @@ -1,131 +0,0 @@ ---- -# generated by https://github.com/hashicorp/terraform-plugin-docs -page_title: "stackitprivatepreview_postgresflexalpha_instance Resource - stackitprivatepreview" -subcategory: "" -description: |- - ---- - -# stackitprivatepreview_postgresflexalpha_instance (Resource) - - - -## Example Usage - -```terraform -resource "stackitprivatepreview_postgresflexalpha_instance" "example-instance" { - project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" - name = "example-instance" - acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"] - backup_schedule = "0 0 * * *" - retention_days = 30 - flavor_id = "flavor.id" - replicas = 1 - storage = { - performance_class = "premium-perf2-stackit" - size = 10 - } - encryption = { - kek_key_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" - kek_key_ring_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" - kek_key_version = 1 - service_account = "service@account.email" - } - network = { - acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"] - access_scope = "PUBLIC" - } - version = 17 -} - -# Only use the import statement, if you want to import an existing postgresflex instance -import { - to = stackitprivatepreview_postgresflexalpha_instance.import-example - id = "${var.project_id},${var.region},${var.postgres_instance_id}" -} - -import { - to = stackitprivatepreview_postgresflexalpha_instance.import-example - identity = { - project_id = var.project_id - region = var.region - instance_id = var.postgres_instance_id - } -} -``` - - -## Schema - -### Required - -- `backup_schedule` (String) The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule. -- `flavor_id` (String) The id of the instance flavor. -- `name` (String) The name of the instance. -- `network` (Attributes) The access configuration of the instance (see [below for nested schema](#nestedatt--network)) -- `replicas` (Number) How many replicas the instance should have. -- `retention_days` (Number) How long backups are retained. The value can only be between 32 and 365 days. -- `storage` (Attributes) The object containing information about the storage size and class. (see [below for nested schema](#nestedatt--storage)) -- `version` (String) The Postgres version used for the instance. See [Versions Endpoint](/documentation/postgres-flex-service/version/v3alpha1#tag/Version) for supported version parameters. - -### Optional - -- `encryption` (Attributes) The configuration for instance's volume and backup storage encryption. - -⚠︝ **Note:** This feature is in private preview. Supplying this object is only permitted for enabled accounts. If your account does not have access, the request will be rejected. (see [below for nested schema](#nestedatt--encryption)) -- `instance_id` (String) The ID of the instance. -- `project_id` (String) The STACKIT project ID. -- `region` (String) The region which should be addressed - -### Read-Only - -- `acl` (List of String) List of IPV4 cidr. -- `connection_info` (Attributes) The DNS name and port in the instance overview (see [below for nested schema](#nestedatt--connection_info)) -- `id` (String) The ID of the instance. -- `is_deletable` (Boolean) Whether the instance can be deleted or not. -- `status` (String) The current status of the instance. - - -### Nested Schema for `network` - -Required: - -- `acl` (List of String) List of IPV4 cidr. - -Optional: - -- `access_scope` (String) The access scope of the instance. It defines if the instance is public or airgapped. - -Read-Only: - -- `instance_address` (String) -- `router_address` (String) - - - -### Nested Schema for `storage` - -Required: - -- `performance_class` (String) The storage class for the storage. -- `size` (Number) The storage size in Gigabytes. - - - -### Nested Schema for `encryption` - -Required: - -- `kek_key_id` (String) The encryption-key key identifier -- `kek_key_ring_id` (String) The encryption-key keyring identifier -- `kek_key_version` (String) The encryption-key version -- `service_account` (String) - - - -### Nested Schema for `connection_info` - -Read-Only: - -- `host` (String) The host of the instance. -- `port` (Number) The port of the instance. diff --git a/docs/resources/postgresflexalpha_user.md b/docs/resources/postgresflexalpha_user.md deleted file mode 100644 index 168d17ff..00000000 --- a/docs/resources/postgresflexalpha_user.md +++ /dev/null @@ -1,59 +0,0 @@ ---- -# generated by https://github.com/hashicorp/terraform-plugin-docs -page_title: "stackitprivatepreview_postgresflexalpha_user Resource - stackitprivatepreview" -subcategory: "" -description: |- - ---- - -# stackitprivatepreview_postgresflexalpha_user (Resource) - - - -## Example Usage - -```terraform -resource "stackitprivatepreview_postgresflexalpha_user" "example" { - project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" - instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" - name = "username" - roles = ["role"] -} - -# Only use the import statement, if you want to import an existing postgresflex user -import { - to = stackitprivatepreview_postgresflexalpha_user.import-example - id = "${var.project_id},${var.region},${var.postgres_instance_id},${var.user_id}" -} - -import { - to = stackitprivatepreview_postgresflexalpha_user.import-example - identity = { - project_id = "project.id" - region = "region" - instance_id = "instance.id" - user_id = "user.id" - } -} -``` - - -## Schema - -### Required - -- `name` (String) The name of the user. - -### Optional - -- `instance_id` (String) The ID of the instance. -- `project_id` (String) The STACKIT project ID. -- `region` (String) The region which should be addressed -- `roles` (List of String) A list containing the user roles for the instance. -- `user_id` (Number) The ID of the user. - -### Read-Only - -- `id` (Number) The ID of the user. -- `password` (String) The password for the user. -- `status` (String) The current status of the user. diff --git a/docs/resources/sqlserverflexalpha_database.md b/docs/resources/sqlserverflexalpha_database.md deleted file mode 100644 index 7d8f050b..00000000 --- a/docs/resources/sqlserverflexalpha_database.md +++ /dev/null @@ -1,63 +0,0 @@ ---- -# generated by https://github.com/hashicorp/terraform-plugin-docs -page_title: "stackitprivatepreview_sqlserverflexalpha_database Resource - stackitprivatepreview" -subcategory: "" -description: |- - ---- - -# stackitprivatepreview_sqlserverflexalpha_database (Resource) - - - -## Example Usage - -```terraform -resource "stackitprivatepreview_sqlserverflexalpha_database" "example" { - project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" - instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" - collation = "" - compatibility = "160" - name = "" - owner = "" -} - -# Only use the import statement, if you want to import a existing sqlserverflex database -import { - to = stackitprivatepreview_sqlserverflexalpha_database.import-example - id = "${var.project_id},${var.region},${var.sql_instance_id},${var.sql_user_id}" -} - -import { - to = stackitprivatepreview_sqlserverflexalpha_database.import-example - identity = { - project_id = "project.id" - region = "region" - instance_id = "instance.id" - database_id = "database.id" - } -} -``` - - -## Schema - -### Required - -- `name` (String) The name of the database. -- `owner` (String) The owner of the database. - -### Optional - -- `collation` (String) The collation of the database. This database collation should match the *collation_name* of one of the collations given by the **Get database collation list** endpoint. -- `compatibility` (Number) CompatibilityLevel of the Database. -- `database_name` (String) The name of the database. -- `instance_id` (String) The ID of the instance. -- `project_id` (String) The STACKIT project ID. -- `region` (String) The region which should be addressed - -### Read-Only - -- `collation_name` (String) The collation of the database. This database collation should match the *collation_name* of one of the collations given by the **Get database collation list** endpoint. -- `compatibility_level` (Number) CompatibilityLevel of the Database. -- `id` (Number) The id of the database. diff --git a/docs/resources/sqlserverflexalpha_instance.md b/docs/resources/sqlserverflexalpha_instance.md deleted file mode 100644 index 95e33673..00000000 --- a/docs/resources/sqlserverflexalpha_instance.md +++ /dev/null @@ -1,103 +0,0 @@ ---- -# generated by https://github.com/hashicorp/terraform-plugin-docs -page_title: "stackitprivatepreview_sqlserverflexalpha_instance Resource - stackitprivatepreview" -subcategory: "" -description: |- - ---- - -# stackitprivatepreview_sqlserverflexalpha_instance (Resource) - - - -## Example Usage - -```terraform -resource "stackitprivatepreview_sqlserverflexalpha_instance" "example" { - project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" - name = "example-instance" - acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"] - backup_schedule = "00 00 * * *" - flavor = { - cpu = 4 - ram = 16 - } - storage = { - class = "class" - size = 5 - } - version = 2022 -} - -# Only use the import statement, if you want to import an existing sqlserverflex instance -import { - to = stackitprivatepreview_sqlserverflexalpha_instance.import-example - id = "${var.project_id},${var.region},${var.sql_instance_id}" -} -``` - - -## Schema - -### Required - -- `backup_schedule` (String) The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule. -- `flavor_id` (String) The id of the instance flavor. -- `name` (String) The name of the instance. -- `network` (Attributes) the network configuration of the instance. (see [below for nested schema](#nestedatt--network)) -- `retention_days` (Number) The days for how long the backup files should be stored before cleaned up. 30 to 365 -- `storage` (Attributes) The object containing information about the storage size and class. (see [below for nested schema](#nestedatt--storage)) -- `version` (String) The sqlserver version used for the instance. - -### Optional - -- `encryption` (Attributes) this defines which key to use for storage encryption (see [below for nested schema](#nestedatt--encryption)) -- `instance_id` (String) The ID of the instance. -- `project_id` (String) The STACKIT project ID. -- `region` (String) The region which should be addressed - -### Read-Only - -- `edition` (String) Edition of the MSSQL server instance -- `id` (String) The ID of the instance. -- `is_deletable` (Boolean) Whether the instance can be deleted or not. -- `replicas` (Number) How many replicas the instance should have. -- `status` (String) - - -### Nested Schema for `network` - -Required: - -- `acl` (List of String) List of IPV4 cidr. - -Optional: - -- `access_scope` (String) The network access scope of the instance - -⚠️ **Note:** This feature is in private preview. Supplying this object is only permitted for enabled accounts. If your account does not have access, the request will be rejected. - -Read-Only: - -- `instance_address` (String) -- `router_address` (String) - - - -### Nested Schema for `storage` - -Required: - -- `class` (String) The storage class for the storage. -- `size` (Number) The storage size in Gigabytes. - - - -### Nested Schema for `encryption` - -Required: - -- `kek_key_id` (String) The key identifier -- `kek_key_ring_id` (String) The keyring identifier -- `kek_key_version` (String) The key version -- `service_account` (String) diff --git a/docs/resources/sqlserverflexalpha_user.md b/docs/resources/sqlserverflexalpha_user.md deleted file mode 100644 index 85d5350e..00000000 --- a/docs/resources/sqlserverflexalpha_user.md +++ /dev/null @@ -1,53 +0,0 @@ ---- -# generated by https://github.com/hashicorp/terraform-plugin-docs -page_title: "stackitprivatepreview_sqlserverflexalpha_user Resource - stackitprivatepreview" -subcategory: "" -description: |- - ---- - -# stackitprivatepreview_sqlserverflexalpha_user (Resource) - - - -## Example Usage - -```terraform -resource "stackitprivatepreview_sqlserverflexalpha_user" "example" { - project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" - instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" - username = "username" - roles = ["role"] -} - -# Only use the import statement, if you want to import an existing sqlserverflex user -import { - to = stackitprivatepreview_sqlserverflexalpha_user.import-example - id = "${var.project_id},${var.region},${var.sql_instance_id},${var.sql_user_id}" -} -``` - - -## Schema - -### Required - -- `roles` (List of String) A list containing the user roles for the instance. A list with the valid user roles can be retrieved using the List Roles endpoint. -- `username` (String) The name of the user. - -### Optional - -- `default_database` (String) The default database for a user of the instance. -- `instance_id` (String) The ID of the instance. -- `project_id` (String) The STACKIT project ID. -- `region` (String) The region which should be addressed -- `user_id` (Number) The ID of the user. - -### Read-Only - -- `host` (String) The host of the instance in which the user belongs to. -- `id` (Number) The ID of the user. -- `password` (String) The password for the user. -- `port` (Number) The port of the instance in which the user belongs to. -- `status` (String) The current status of the user. -- `uri` (String) The connection string for the user to the instance. diff --git a/docs/resources/sqlserverflexbeta_database.md b/docs/resources/sqlserverflexbeta_database.md deleted file mode 100644 index fabaaccb..00000000 --- a/docs/resources/sqlserverflexbeta_database.md +++ /dev/null @@ -1,51 +0,0 @@ ---- -# generated by https://github.com/hashicorp/terraform-plugin-docs -page_title: "stackitprivatepreview_sqlserverflexbeta_database Resource - stackitprivatepreview" -subcategory: "" -description: |- - ---- - -# stackitprivatepreview_sqlserverflexbeta_database (Resource) - - - -## Example Usage - -```terraform -resource "stackitprivatepreview_sqlserverflexalpha_user" "example" { - project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" - instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" - username = "username" - roles = ["role"] -} - -# Only use the import statement, if you want to import an existing sqlserverflex user -import { - to = stackitprivatepreview_sqlserverflexalpha_user.import-example - id = "${var.project_id},${var.region},${var.sql_instance_id},${var.sql_user_id}" -} -``` - - -## Schema - -### Required - -- `name` (String) The name of the database. -- `owner` (String) The owner of the database. - -### Optional - -- `collation` (String) The collation of the database. This database collation should match the *collation_name* of one of the collations given by the **Get database collation list** endpoint. -- `compatibility` (Number) CompatibilityLevel of the Database. -- `database_name` (String) The name of the database. -- `instance_id` (String) The ID of the instance. -- `project_id` (String) The STACKIT project ID. -- `region` (String) The region which should be addressed - -### Read-Only - -- `collation_name` (String) The collation of the database. This database collation should match the *collation_name* of one of the collations given by the **Get database collation list** endpoint. -- `compatibility_level` (Number) CompatibilityLevel of the Database. -- `id` (Number) The id of the database. diff --git a/docs/resources/sqlserverflexbeta_instance.md b/docs/resources/sqlserverflexbeta_instance.md deleted file mode 100644 index 20f5a9bc..00000000 --- a/docs/resources/sqlserverflexbeta_instance.md +++ /dev/null @@ -1,158 +0,0 @@ ---- -# generated by https://github.com/hashicorp/terraform-plugin-docs -page_title: "stackitprivatepreview_sqlserverflexbeta_instance Resource - stackitprivatepreview" -subcategory: "" -description: |- - ---- - -# stackitprivatepreview_sqlserverflexbeta_instance (Resource) - - - -## Example Usage - -```terraform -# without encryption and SNA -resource "stackitprivatepreview_sqlserverflexbeta_instance" "instance" { - project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" - name = "example-instance" - backup_schedule = "0 3 * * *" - retention_days = 31 - flavor_id = "flavor_id" - storage = { - class = "premium-perf2-stackit" - size = 50 - } - version = 2022 - network = { - acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"] - access_scope = "SNA" - } -} - -# without encryption and PUBLIC -resource "stackitprivatepreview_sqlserverflexbeta_instance" "instance" { - project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" - name = "example-instance" - backup_schedule = "0 3 * * *" - retention_days = 31 - flavor_id = "flavor_id" - storage = { - class = "premium-perf2-stackit" - size = 50 - } - version = 2022 - network = { - acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"] - access_scope = "PUBLIC" - } -} - -# with encryption and SNA -resource "stackitprivatepreview_sqlserverflexbeta_instance" "instance" { - project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" - name = "example-instance" - backup_schedule = "0 3 * * *" - retention_days = 31 - flavor_id = "flavor_id" - storage = { - class = "premium-perf2-stackit" - size = 50 - } - version = 2022 - encryption = { - kek_key_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" - kek_key_ring_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" - kek_key_version = 1 - service_account = "service_account@email" - } - network = { - acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"] - access_scope = "SNA" - } -} - - -# Only use the import statement, if you want to import an existing sqlserverflex instance -import { - to = stackitprivatepreview_sqlserverflexalpha_instance.import-example - id = "${var.project_id},${var.region},${var.sql_instance_id}" -} - -# import with identity -import { - to = stackitprivatepreview_sqlserverflexalpha_instance.import-example - identity = { - project_id = var.project_id - region = var.region - instance_id = var.sql_instance_id - } -} -``` - - -## Schema - -### Required - -- `backup_schedule` (String) The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule. -- `flavor_id` (String) The id of the instance flavor. -- `name` (String) The name of the instance. -- `network` (Attributes) the network configuration of the instance. (see [below for nested schema](#nestedatt--network)) -- `retention_days` (Number) The days for how long the backup files should be stored before cleaned up. 30 to 365 -- `storage` (Attributes) The object containing information about the storage size and class. (see [below for nested schema](#nestedatt--storage)) -- `version` (String) The sqlserver version used for the instance. - -### Optional - -- `encryption` (Attributes) this defines which key to use for storage encryption (see [below for nested schema](#nestedatt--encryption)) -- `instance_id` (String) The ID of the instance. -- `project_id` (String) The STACKIT project ID. -- `region` (String) The region which should be addressed - -### Read-Only - -- `edition` (String) Edition of the MSSQL server instance -- `id` (String) The ID of the instance. -- `is_deletable` (Boolean) Whether the instance can be deleted or not. -- `replicas` (Number) How many replicas the instance should have. -- `status` (String) - - -### Nested Schema for `network` - -Required: - -- `acl` (List of String) List of IPV4 cidr. - -Optional: - -- `access_scope` (String) The network access scope of the instance - -⚠️ **Note:** This feature is in private preview. Supplying this object is only permitted for enabled accounts. If your account does not have access, the request will be rejected. - -Read-Only: - -- `instance_address` (String) -- `router_address` (String) - - - -### Nested Schema for `storage` - -Required: - -- `class` (String) The storage class for the storage. -- `size` (Number) The storage size in Gigabytes. - - - -### Nested Schema for `encryption` - -Required: - -- `kek_key_id` (String) The key identifier -- `kek_key_ring_id` (String) The keyring identifier -- `kek_key_version` (String) The key version -- `service_account` (String) diff --git a/docs/resources/sqlserverflexbeta_user.md b/docs/resources/sqlserverflexbeta_user.md deleted file mode 100644 index 81d6da28..00000000 --- a/docs/resources/sqlserverflexbeta_user.md +++ /dev/null @@ -1,53 +0,0 @@ ---- -# generated by https://github.com/hashicorp/terraform-plugin-docs -page_title: "stackitprivatepreview_sqlserverflexbeta_user Resource - stackitprivatepreview" -subcategory: "" -description: |- - ---- - -# stackitprivatepreview_sqlserverflexbeta_user (Resource) - - - -## Example Usage - -```terraform -resource "stackitprivatepreview_sqlserverflexalpha_user" "example" { - project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" - instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" - username = "username" - roles = ["role"] -} - -# Only use the import statement, if you want to import an existing sqlserverflex user -import { - to = stackitprivatepreview_sqlserverflexalpha_user.import-example - id = "${var.project_id},${var.region},${var.sql_instance_id},${var.sql_user_id}" -} -``` - - -## Schema - -### Required - -- `roles` (List of String) A list containing the user roles for the instance. A list with the valid user roles can be retrieved using the List Roles endpoint. -- `username` (String) The name of the user. - -### Optional - -- `default_database` (String) The default database for a user of the instance. -- `instance_id` (String) The ID of the instance. -- `project_id` (String) The STACKIT project ID. -- `region` (String) The region which should be addressed -- `user_id` (Number) The ID of the user. - -### Read-Only - -- `host` (String) The host of the instance in which the user belongs to. -- `id` (Number) The ID of the user. -- `password` (String) The password for the user. -- `port` (Number) The port of the instance in which the user belongs to. -- `status` (String) The current status of the user. -- `uri` (String) The connection string for the user to the instance. diff --git a/examples/resources/stackitprivatepreview_postgresflexalpha_instance/resource.tf b/examples/resources/stackitprivatepreview_postgresflexalpha_instance/resource.tf index 711a9f60..b503f0ce 100644 --- a/examples/resources/stackitprivatepreview_postgresflexalpha_instance/resource.tf +++ b/examples/resources/stackitprivatepreview_postgresflexalpha_instance/resource.tf @@ -17,7 +17,7 @@ resource "stackitprivatepreview_postgresflexalpha_instance" "example-instance" { service_account = "service@account.email" } network = { - acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"] + acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"] access_scope = "PUBLIC" } version = 17 diff --git a/examples/resources/stackitprivatepreview_postgresflexalpha_user/resource.tf b/examples/resources/stackitprivatepreview_postgresflexalpha_user/resource.tf index 756e854d..695741c4 100644 --- a/examples/resources/stackitprivatepreview_postgresflexalpha_user/resource.tf +++ b/examples/resources/stackitprivatepreview_postgresflexalpha_user/resource.tf @@ -1,7 +1,7 @@ resource "stackitprivatepreview_postgresflexalpha_user" "example" { project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" - name = "username" + name = "username" roles = ["role"] } diff --git a/go.mod b/go.mod index 749c34d5..d4cfe7c4 100644 --- a/go.mod +++ b/go.mod @@ -2,10 +2,17 @@ module tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stac go 1.25.6 + + require ( github.com/SladkyCitron/slogcolor v1.8.0 + github.com/golang-jwt/jwt/v5 v5.3.1 + github.com/golangci/golangci-lint/v2 v2.10.1 github.com/google/go-cmp v0.7.0 github.com/google/uuid v1.6.0 + github.com/hashicorp/terraform-plugin-codegen-framework v0.4.1 + github.com/hashicorp/terraform-plugin-codegen-openapi v0.3.0 + github.com/hashicorp/terraform-plugin-docs v0.24.0 github.com/hashicorp/terraform-plugin-framework v1.17.0 github.com/hashicorp/terraform-plugin-framework-validators v0.19.0 github.com/hashicorp/terraform-plugin-go v0.29.0 @@ -19,71 +26,277 @@ require ( github.com/spf13/cobra v1.10.2 github.com/stackitcloud/stackit-sdk-go/core v0.21.1 github.com/stackitcloud/stackit-sdk-go/services/iaasalpha v0.1.23-alpha - github.com/stretchr/testify v1.11.1 github.com/teambition/rrule-go v1.8.2 + golang.org/x/tools v0.42.0 gopkg.in/yaml.v3 v3.0.1 ) -require ( - github.com/hashicorp/go-retryablehttp v0.7.8 // indirect - golang.org/x/telemetry v0.0.0-20260213145524-e0ab670178e1 // indirect -) +require github.com/hashicorp/go-retryablehttp v0.7.8 // indirect require ( + 4d63.com/gocheckcompilerdirectives v1.3.0 // indirect + 4d63.com/gochecknoglobals v0.2.2 // indirect + codeberg.org/chavacava/garif v0.2.0 // indirect + codeberg.org/polyfloyd/go-errorlint v1.9.0 // indirect dario.cat/mergo v1.0.1 // indirect + dev.gaijin.team/go/exhaustruct/v4 v4.0.0 // indirect + dev.gaijin.team/go/golib v0.6.0 // indirect + github.com/4meepo/tagalign v1.4.3 // indirect + github.com/Abirdcfly/dupword v0.1.7 // indirect + github.com/AdminBenni/iota-mixing v1.0.0 // indirect + github.com/AlwxSin/noinlineerr v1.0.5 // indirect + github.com/Antonboom/errname v1.1.1 // indirect + github.com/Antonboom/nilnil v1.1.1 // indirect + github.com/Antonboom/testifylint v1.6.4 // indirect + github.com/BurntSushi/toml v1.6.0 // indirect + github.com/Djarvur/go-err113 v0.1.1 // indirect + github.com/Kunde21/markdownfmt/v3 v3.1.0 // indirect + github.com/Masterminds/goutils v1.1.1 // indirect + github.com/Masterminds/semver/v3 v3.4.0 // indirect + github.com/Masterminds/sprig/v3 v3.2.3 // indirect + github.com/MirrexOne/unqueryvet v1.5.3 // indirect + github.com/OpenPeeDeeP/depguard/v2 v2.2.1 // indirect github.com/ProtonMail/go-crypto v1.3.0 // indirect github.com/agext/levenshtein v1.2.3 // indirect + github.com/alecthomas/chroma/v2 v2.23.1 // indirect + github.com/alecthomas/go-check-sumtype v0.3.1 // indirect + github.com/alexkohler/nakedret/v2 v2.0.6 // indirect + github.com/alexkohler/prealloc v1.0.2 // indirect + github.com/alfatraining/structtag v1.0.0 // indirect + github.com/alingse/asasalint v0.0.11 // indirect + github.com/alingse/nilnesserr v0.2.0 // indirect github.com/apparentlymart/go-textseg/v15 v15.0.0 // indirect + github.com/armon/go-radix v1.0.0 // indirect + github.com/ashanbrown/forbidigo/v2 v2.3.0 // indirect + github.com/ashanbrown/makezero/v2 v2.1.0 // indirect + github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect + github.com/bahlo/generic-list-go v0.2.0 // indirect + github.com/beorn7/perks v1.0.1 // indirect + github.com/bgentry/speakeasy v0.1.0 // indirect + github.com/bkielbasa/cyclop v1.2.3 // indirect + github.com/blizzy78/varnamelen v0.8.0 // indirect + github.com/bmatcuk/doublestar/v4 v4.9.1 // indirect + github.com/bombsimon/wsl/v4 v4.7.0 // indirect + github.com/bombsimon/wsl/v5 v5.6.0 // indirect + github.com/breml/bidichk v0.3.3 // indirect + github.com/breml/errchkjson v0.4.1 // indirect + github.com/buger/jsonparser v1.1.1 // indirect + github.com/butuzov/ireturn v0.4.0 // indirect + github.com/butuzov/mirror v1.3.0 // indirect + github.com/catenacyber/perfsprint v0.10.1 // indirect + github.com/ccojocar/zxcvbn-go v1.0.4 // indirect + github.com/cespare/xxhash/v2 v2.3.0 // indirect + github.com/charithe/durationcheck v0.0.11 // indirect + github.com/charmbracelet/colorprofile v0.2.3-0.20250311203215-f60798e515dc // indirect + github.com/charmbracelet/lipgloss v1.1.0 // indirect + github.com/charmbracelet/x/ansi v0.10.1 // indirect + github.com/charmbracelet/x/cellbuf v0.0.13-0.20250311204145-2c3ea96c31dd // indirect + github.com/charmbracelet/x/term v0.2.1 // indirect + github.com/ckaznocha/intrange v0.3.1 // indirect github.com/cloudflare/circl v1.6.3 // indirect + github.com/curioswitch/go-reassign v0.3.0 // indirect + github.com/daixiang0/gci v0.13.7 // indirect + github.com/dave/dst v0.27.3 // indirect github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect + github.com/denis-tingaikin/go-header v0.5.0 // indirect + github.com/dlclark/regexp2 v1.11.5 // indirect + github.com/dprotaso/go-yit v0.0.0-20220510233725-9ba8df137936 // indirect + github.com/ettle/strcase v0.2.0 // indirect github.com/fatih/color v1.18.0 // indirect - github.com/golang-jwt/jwt/v5 v5.3.1 // indirect + github.com/fatih/structtag v1.2.0 // indirect + github.com/firefart/nonamedreturns v1.0.6 // indirect + github.com/fsnotify/fsnotify v1.5.4 // indirect + github.com/fzipp/gocyclo v0.6.0 // indirect + github.com/ghostiam/protogetter v0.3.20 // indirect + github.com/go-critic/go-critic v0.14.3 // indirect + github.com/go-toolsmith/astcast v1.1.0 // indirect + github.com/go-toolsmith/astcopy v1.1.0 // indirect + github.com/go-toolsmith/astequal v1.2.0 // indirect + github.com/go-toolsmith/astfmt v1.1.0 // indirect + github.com/go-toolsmith/astp v1.1.0 // indirect + github.com/go-toolsmith/strparse v1.1.0 // indirect + github.com/go-toolsmith/typep v1.1.0 // indirect + github.com/go-viper/mapstructure/v2 v2.5.0 // indirect + github.com/go-xmlfmt/xmlfmt v1.1.3 // indirect + github.com/gobwas/glob v0.2.3 // indirect + github.com/godoc-lint/godoc-lint v0.11.2 // indirect + github.com/gofrs/flock v0.13.0 // indirect github.com/golang/protobuf v1.5.4 // indirect + github.com/golangci/asciicheck v0.5.0 // indirect + github.com/golangci/dupl v0.0.0-20250308024227-f665c8d69b32 // indirect + github.com/golangci/go-printf-func-name v0.1.1 // indirect + github.com/golangci/gofmt v0.0.0-20250106114630-d62b90e6713d // indirect + github.com/golangci/golines v0.15.0 // indirect + github.com/golangci/misspell v0.8.0 // indirect + github.com/golangci/plugin-module-register v0.1.2 // indirect + github.com/golangci/revgrep v0.8.0 // indirect + github.com/golangci/swaggoswag v0.0.0-20250504205917-77f2aca3143e // indirect + github.com/golangci/unconvert v0.0.0-20250410112200-a129a6e6413e // indirect + github.com/gordonklaus/ineffassign v0.2.0 // indirect + github.com/gostaticanalysis/analysisutil v0.7.1 // indirect + github.com/gostaticanalysis/comment v1.5.0 // indirect + github.com/gostaticanalysis/forcetypeassert v0.2.0 // indirect + github.com/gostaticanalysis/nilerr v0.1.2 // indirect + github.com/hashicorp/cli v1.1.7 // indirect github.com/hashicorp/errwrap v1.1.0 // indirect github.com/hashicorp/go-checkpoint v0.5.0 // indirect github.com/hashicorp/go-cleanhttp v0.5.2 // indirect github.com/hashicorp/go-cty v1.5.0 // indirect github.com/hashicorp/go-hclog v1.6.3 // indirect + github.com/hashicorp/go-immutable-radix/v2 v2.1.0 // indirect github.com/hashicorp/go-multierror v1.1.1 // indirect github.com/hashicorp/go-plugin v1.7.0 // indirect github.com/hashicorp/go-uuid v1.0.3 // indirect github.com/hashicorp/go-version v1.8.0 // indirect + github.com/hashicorp/golang-lru/v2 v2.0.7 // indirect github.com/hashicorp/hc-install v0.9.3 // indirect + github.com/hashicorp/hcl v1.0.0 // indirect github.com/hashicorp/hcl/v2 v2.24.0 // indirect github.com/hashicorp/logutils v1.0.0 // indirect github.com/hashicorp/terraform-exec v0.25.0 // indirect github.com/hashicorp/terraform-json v0.27.2 // indirect + github.com/hashicorp/terraform-plugin-codegen-spec v0.2.0 // indirect github.com/hashicorp/terraform-plugin-sdk/v2 v2.38.2 // indirect github.com/hashicorp/terraform-registry-address v0.4.0 // indirect github.com/hashicorp/terraform-svchost v0.2.0 // indirect github.com/hashicorp/yamux v0.1.2 // indirect + github.com/hexops/gotextdiff v1.0.3 // indirect + github.com/huandu/xstrings v1.4.0 // indirect + github.com/imdario/mergo v0.3.16 // indirect github.com/inconshreveable/mousetrap v1.1.0 // indirect - github.com/kr/text v0.2.0 // indirect + github.com/jgautheron/goconst v1.8.2 // indirect + github.com/jingyugao/rowserrcheck v1.1.1 // indirect + github.com/jjti/go-spancheck v0.6.5 // indirect + github.com/julz/importas v0.2.0 // indirect + github.com/karamaru-alpha/copyloopvar v1.2.2 // indirect + github.com/kisielk/errcheck v1.9.0 // indirect + github.com/kkHAIKE/contextcheck v1.1.6 // indirect + github.com/kulti/thelper v0.7.1 // indirect + github.com/kunwardeep/paralleltest v1.0.15 // indirect + github.com/lasiar/canonicalheader v1.1.2 // indirect + github.com/ldez/exptostd v0.4.5 // indirect + github.com/ldez/gomoddirectives v0.8.0 // indirect + github.com/ldez/grignotin v0.10.1 // indirect + github.com/ldez/structtags v0.6.1 // indirect + github.com/ldez/tagliatelle v0.7.2 // indirect + github.com/ldez/usetesting v0.5.0 // indirect + github.com/leonklingele/grouper v1.1.2 // indirect + github.com/lucasb-eyer/go-colorful v1.2.0 // indirect + github.com/macabu/inamedparam v0.2.0 // indirect + github.com/magiconair/properties v1.8.6 // indirect + github.com/mailru/easyjson v0.7.7 // indirect + github.com/manuelarte/embeddedstructfieldcheck v0.4.0 // indirect + github.com/manuelarte/funcorder v0.5.0 // indirect + github.com/maratori/testableexamples v1.0.1 // indirect + github.com/maratori/testpackage v1.1.2 // indirect + github.com/matoous/godox v1.1.0 // indirect github.com/mattn/go-colorable v0.1.14 // indirect github.com/mattn/go-isatty v0.0.20 // indirect + github.com/mattn/go-runewidth v0.0.16 // indirect + github.com/matttproud/golang_protobuf_extensions v1.0.1 // indirect + github.com/mgechev/revive v1.14.0 // indirect github.com/mitchellh/copystructure v1.2.0 // indirect + github.com/mitchellh/go-homedir v1.1.0 // indirect github.com/mitchellh/go-testing-interface v1.14.1 // indirect github.com/mitchellh/go-wordwrap v1.0.1 // indirect github.com/mitchellh/mapstructure v1.5.0 // indirect github.com/mitchellh/reflectwalk v1.0.2 // indirect + github.com/moricho/tparallel v0.3.2 // indirect + github.com/muesli/termenv v0.16.0 // indirect + github.com/nakabonne/nestif v0.3.1 // indirect + github.com/nishanths/exhaustive v0.12.0 // indirect + github.com/nishanths/predeclared v0.2.2 // indirect + github.com/nunnatsa/ginkgolinter v0.23.0 // indirect github.com/oklog/run v1.2.0 // indirect + github.com/pb33f/libopenapi v0.15.0 // indirect + github.com/pelletier/go-toml v1.9.5 // indirect + github.com/pelletier/go-toml/v2 v2.2.4 // indirect github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect + github.com/posener/complete v1.2.3 // indirect + github.com/prometheus/client_golang v1.12.1 // indirect + github.com/prometheus/client_model v0.2.0 // indirect + github.com/prometheus/common v0.32.1 // indirect + github.com/prometheus/procfs v0.7.3 // indirect + github.com/quasilyte/go-ruleguard v0.4.5 // indirect + github.com/quasilyte/go-ruleguard/dsl v0.3.23 // indirect + github.com/quasilyte/gogrep v0.5.0 // indirect + github.com/quasilyte/regex/syntax v0.0.0-20210819130434-b3f0c404a727 // indirect + github.com/quasilyte/stdinfo v0.0.0-20220114132959-f7386bf02567 // indirect + github.com/raeperd/recvcheck v0.2.0 // indirect + github.com/rivo/uniseg v0.4.7 // indirect + github.com/rogpeppe/go-internal v1.14.1 // indirect + github.com/ryancurrah/gomodguard v1.4.1 // indirect + github.com/ryanrolds/sqlclosecheck v0.5.1 // indirect + github.com/sanposhiho/wastedassign/v2 v2.1.0 // indirect + github.com/santhosh-tekuri/jsonschema/v6 v6.0.2 // indirect + github.com/sashamelentyev/interfacebloat v1.1.0 // indirect + github.com/sashamelentyev/usestdlibvars v1.29.0 // indirect + github.com/securego/gosec/v2 v2.23.0 // indirect + github.com/shopspring/decimal v1.3.1 // indirect + github.com/sirupsen/logrus v1.9.4 // indirect + github.com/sivchari/containedctx v1.0.3 // indirect + github.com/sonatard/noctx v0.4.0 // indirect + github.com/sourcegraph/go-diff v0.7.0 // indirect + github.com/spf13/afero v1.15.0 // indirect + github.com/spf13/cast v1.5.1 // indirect + github.com/spf13/jwalterweatherman v1.1.0 // indirect github.com/spf13/pflag v1.0.10 // indirect + github.com/spf13/viper v1.12.0 // indirect + github.com/ssgreg/nlreturn/v2 v2.2.1 // indirect + github.com/stbenjam/no-sprintf-host-port v0.3.1 // indirect + github.com/stretchr/objx v0.5.2 // indirect + github.com/stretchr/testify v1.11.1 // indirect + github.com/subosito/gotenv v1.4.1 // indirect + github.com/tetafro/godot v1.5.4 // indirect + github.com/timakin/bodyclose v0.0.0-20241222091800-1db5c5ca4d67 // indirect + github.com/timonwong/loggercheck v0.11.0 // indirect + github.com/tomarrell/wrapcheck/v2 v2.12.0 // indirect + github.com/tommy-muehle/go-mnd/v2 v2.5.1 // indirect + github.com/ultraware/funlen v0.2.0 // indirect + github.com/ultraware/whitespace v0.2.0 // indirect + github.com/uudashr/gocognit v1.2.0 // indirect + github.com/uudashr/iface v1.4.1 // indirect github.com/vmihailenco/msgpack v4.0.4+incompatible // indirect github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect + github.com/vmware-labs/yaml-jsonpath v0.3.2 // indirect + github.com/wk8/go-ordered-map/v2 v2.1.8 // indirect + github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb // indirect + github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 // indirect + github.com/xeipuuv/gojsonschema v1.2.0 // indirect + github.com/xen0n/gosmopolitan v1.3.0 // indirect + github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e // indirect + github.com/yagipy/maintidx v1.0.0 // indirect + github.com/yeya24/promlinter v0.3.0 // indirect + github.com/ykadowak/zerologlint v0.1.5 // indirect + github.com/yuin/goldmark v1.7.7 // indirect + github.com/yuin/goldmark-meta v1.1.0 // indirect github.com/zclconf/go-cty v1.17.0 // indirect + gitlab.com/bosi/decorder v0.4.2 // indirect + go-simpler.org/musttag v0.14.0 // indirect + go-simpler.org/sloglint v0.11.1 // indirect + go.abhg.dev/goldmark/frontmatter v0.2.0 // indirect + go.augendre.info/arangolint v0.4.0 // indirect + go.augendre.info/fatcontext v0.9.0 // indirect + go.uber.org/multierr v1.10.0 // indirect + go.uber.org/zap v1.27.0 // indirect + go.yaml.in/yaml/v3 v3.0.4 // indirect golang.org/x/crypto v0.48.0 // indirect + golang.org/x/exp v0.0.0-20250620022241-b7579e27df2b // indirect + golang.org/x/exp/typeparams v0.0.0-20260209203927-2842357ff358 // indirect golang.org/x/mod v0.33.0 // indirect golang.org/x/net v0.50.0 // indirect golang.org/x/sync v0.19.0 // indirect golang.org/x/sys v0.41.0 // indirect + golang.org/x/telemetry v0.0.0-20260209163413-e7419c687ee4 // indirect golang.org/x/text v0.34.0 // indirect - golang.org/x/tools v0.42.0 // indirect google.golang.org/appengine v1.6.8 // indirect google.golang.org/genproto/googleapis/rpc v0.0.0-20260209200024-4cfbd4190f57 // indirect google.golang.org/grpc v1.79.1 // indirect google.golang.org/protobuf v1.36.11 // indirect + gopkg.in/ini.v1 v1.67.0 // indirect + gopkg.in/yaml.v2 v2.4.0 // indirect + honnef.co/go/tools v0.7.0 // indirect + mvdan.cc/gofumpt v0.9.2 // indirect + mvdan.cc/unparam v0.0.0-20251027182757-5beb8c8f8f15 // indirect ) - -tool golang.org/x/tools/cmd/goimports diff --git a/go.sum b/go.sum index 6553d35b..00f1e6ff 100644 --- a/go.sum +++ b/go.sum @@ -1,63 +1,399 @@ +4d63.com/gocheckcompilerdirectives v1.3.0 h1:Ew5y5CtcAAQeTVKUVFrE7EwHMrTO6BggtEj8BZSjZ3A= +4d63.com/gocheckcompilerdirectives v1.3.0/go.mod h1:ofsJ4zx2QAuIP/NO/NAh1ig6R1Fb18/GI7RVMwz7kAY= +4d63.com/gochecknoglobals v0.2.2 h1:H1vdnwnMaZdQW/N+NrkT1SZMTBmcwHe9Vq8lJcYYTtU= +4d63.com/gochecknoglobals v0.2.2/go.mod h1:lLxwTQjL5eIesRbvnzIP3jZtG140FnTdz+AlMa+ogt0= +cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU= +cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU= +cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= +cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc= +cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0= +cloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6To= +cloud.google.com/go v0.52.0/go.mod h1:pXajvRH/6o3+F9jDHZWQ5PbGhn+o8w9qiu/CffaVdO4= +cloud.google.com/go v0.53.0/go.mod h1:fp/UouUEsRkN6ryDKNW/Upv/JBKnv6WDthjR6+vze6M= +cloud.google.com/go v0.54.0/go.mod h1:1rq2OEkV3YMf6n/9ZvGWI3GWw0VoqH/1x2nd8Is/bPc= +cloud.google.com/go v0.56.0/go.mod h1:jr7tqZxxKOVYizybht9+26Z/gUq7tiRzu+ACVAMbKVk= +cloud.google.com/go v0.57.0/go.mod h1:oXiQ6Rzq3RAkkY7N6t3TcE6jE+CIBBbA36lwQ1JyzZs= +cloud.google.com/go v0.62.0/go.mod h1:jmCYTdRCQuc1PHIIJ/maLInMho30T/Y0M4hTdTShOYc= +cloud.google.com/go v0.65.0/go.mod h1:O5N8zS7uWy9vkA9vayVHs65eM1ubvY4h553ofrNHObY= +cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= +cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= +cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc= +cloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUMb4Nv6dBIg= +cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc= +cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ= +cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= +cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= +cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= +cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw= +cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA= +cloud.google.com/go/pubsub v1.3.1/go.mod h1:i+ucay31+CNRpDW4Lu78I4xXG+O1r/MAHgjpRVR+TSU= +cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw= +cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos= +cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk= +cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs= +cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= +codeberg.org/chavacava/garif v0.2.0 h1:F0tVjhYbuOCnvNcU3YSpO6b3Waw6Bimy4K0mM8y6MfY= +codeberg.org/chavacava/garif v0.2.0/go.mod h1:P2BPbVbT4QcvLZrORc2T29szK3xEOlnl0GiPTJmEqBQ= +codeberg.org/polyfloyd/go-errorlint v1.9.0 h1:VkdEEmA1VBpH6ecQoMR4LdphVI3fA4RrCh2an7YmodI= +codeberg.org/polyfloyd/go-errorlint v1.9.0/go.mod h1:GPRRu2LzVijNn4YkrZYJfatQIdS+TrcK8rL5Xs24qw8= dario.cat/mergo v1.0.1 h1:Ra4+bf83h2ztPIQYNP99R6m+Y7KfnARDfID+a+vLl4s= dario.cat/mergo v1.0.1/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk= +dev.gaijin.team/go/exhaustruct/v4 v4.0.0 h1:873r7aNneqoBB3IaFIzhvt2RFYTuHgmMjoKfwODoI1Y= +dev.gaijin.team/go/exhaustruct/v4 v4.0.0/go.mod h1:aZ/k2o4Y05aMJtiux15x8iXaumE88YdiB0Ai4fXOzPI= +dev.gaijin.team/go/golib v0.6.0 h1:v6nnznFTs4bppib/NyU1PQxobwDHwCXXl15P7DV5Zgo= +dev.gaijin.team/go/golib v0.6.0/go.mod h1:uY1mShx8Z/aNHWDyAkZTkX+uCi5PdX7KsG1eDQa2AVE= +dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= +github.com/4meepo/tagalign v1.4.3 h1:Bnu7jGWwbfpAie2vyl63Zup5KuRv21olsPIha53BJr8= +github.com/4meepo/tagalign v1.4.3/go.mod h1:00WwRjiuSbrRJnSVeGWPLp2epS5Q/l4UEy0apLLS37c= +github.com/Abirdcfly/dupword v0.1.7 h1:2j8sInznrje4I0CMisSL6ipEBkeJUJAmK1/lfoNGWrQ= +github.com/Abirdcfly/dupword v0.1.7/go.mod h1:K0DkBeOebJ4VyOICFdppB23Q0YMOgVafM0zYW0n9lF4= +github.com/AdminBenni/iota-mixing v1.0.0 h1:Os6lpjG2dp/AE5fYBPAA1zfa2qMdCAWwPMCgpwKq7wo= +github.com/AdminBenni/iota-mixing v1.0.0/go.mod h1:i4+tpAaB+qMVIV9OK3m4/DAynOd5bQFaOu+2AhtBCNY= +github.com/AlwxSin/noinlineerr v1.0.5 h1:RUjt63wk1AYWTXtVXbSqemlbVTb23JOSRiNsshj7TbY= +github.com/AlwxSin/noinlineerr v1.0.5/go.mod h1:+QgkkoYrMH7RHvcdxdlI7vYYEdgeoFOVjU9sUhw/rQc= +github.com/Antonboom/errname v1.1.1 h1:bllB7mlIbTVzO9jmSWVWLjxTEbGBVQ1Ff/ClQgtPw9Q= +github.com/Antonboom/errname v1.1.1/go.mod h1:gjhe24xoxXp0ScLtHzjiXp0Exi1RFLKJb0bVBtWKCWQ= +github.com/Antonboom/nilnil v1.1.1 h1:9Mdr6BYd8WHCDngQnNVV0b554xyisFioEKi30sksufQ= +github.com/Antonboom/nilnil v1.1.1/go.mod h1:yCyAmSw3doopbOWhJlVci+HuyNRuHJKIv6V2oYQa8II= +github.com/Antonboom/testifylint v1.6.4 h1:gs9fUEy+egzxkEbq9P4cpcMB6/G0DYdMeiFS87UiqmQ= +github.com/Antonboom/testifylint v1.6.4/go.mod h1:YO33FROXX2OoUfwjz8g+gUxQXio5i9qpVy7nXGbxDD4= +github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= +github.com/BurntSushi/toml v1.6.0 h1:dRaEfpa2VI55EwlIW72hMRHdWouJeRF7TPYhI+AUQjk= +github.com/BurntSushi/toml v1.6.0/go.mod h1:ukJfTF/6rtPPRCnwkur4qwRxa8vTRFBF0uk2lLoLwho= +github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= +github.com/Djarvur/go-err113 v0.1.1 h1:eHfopDqXRwAi+YmCUas75ZE0+hoBHJ2GQNLYRSxao4g= +github.com/Djarvur/go-err113 v0.1.1/go.mod h1:IaWJdYFLg76t2ihfflPZnM1LIQszWOsFDh2hhhAVF6k= +github.com/Kunde21/markdownfmt/v3 v3.1.0 h1:KiZu9LKs+wFFBQKhrZJrFZwtLnCCWJahL+S+E/3VnM0= +github.com/Kunde21/markdownfmt/v3 v3.1.0/go.mod h1:tPXN1RTyOzJwhfHoon9wUr4HGYmWgVxSQN6VBJDkrVc= +github.com/Masterminds/goutils v1.1.1 h1:5nUrii3FMTL5diU80unEVvNevw1nH4+ZV4DSLVJLSYI= +github.com/Masterminds/goutils v1.1.1/go.mod h1:8cTjp+g8YejhMuvIA5y2vz3BpJxksy863GQaJW2MFNU= +github.com/Masterminds/semver/v3 v3.2.0/go.mod h1:qvl/7zhW3nngYb5+80sSMF+FG2BjYrf8m9wsX0PNOMQ= +github.com/Masterminds/semver/v3 v3.4.0 h1:Zog+i5UMtVoCU8oKka5P7i9q9HgrJeGzI9SA1Xbatp0= +github.com/Masterminds/semver/v3 v3.4.0/go.mod h1:4V+yj/TJE1HU9XfppCwVMZq3I84lprf4nC11bSS5beM= +github.com/Masterminds/sprig/v3 v3.2.3 h1:eL2fZNezLomi0uOLqjQoN6BfsDD+fyLtgbJMAj9n6YA= +github.com/Masterminds/sprig/v3 v3.2.3/go.mod h1:rXcFaZ2zZbLRJv/xSysmlgIM1u11eBaRMhvYXJNkGuM= github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY= github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU= +github.com/MirrexOne/unqueryvet v1.5.3 h1:LpT3rsH+IY3cQddWF9bg4C7jsbASdGnrOSofY8IPEiw= +github.com/MirrexOne/unqueryvet v1.5.3/go.mod h1:fs9Zq6eh1LRIhsDIsxf9PONVUjYdFHdtkHIgZdJnyPU= +github.com/OpenPeeDeeP/depguard/v2 v2.2.1 h1:vckeWVESWp6Qog7UZSARNqfu/cZqvki8zsuj3piCMx4= +github.com/OpenPeeDeeP/depguard/v2 v2.2.1/go.mod h1:q4DKzC4UcVaAvcfd41CZh0PWpGgzrVxUYBlgKNGquUo= github.com/ProtonMail/go-crypto v1.3.0 h1:ILq8+Sf5If5DCpHQp4PbZdS1J7HDFRXz/+xKBiRGFrw= github.com/ProtonMail/go-crypto v1.3.0/go.mod h1:9whxjD8Rbs29b4XWbB8irEcE8KHMqaR2e7GWU1R+/PE= github.com/SladkyCitron/slogcolor v1.8.0 h1:ln4mUPfVhs7a/vZfjnKkz5YZ71Bg/KFWneS2hfFq6FM= github.com/SladkyCitron/slogcolor v1.8.0/go.mod h1:ft8LEVIl4isUkebakhv+ngNXJjWBumnwhXfxTLApf3M= github.com/agext/levenshtein v1.2.3 h1:YB2fHEn0UJagG8T1rrWknE3ZQzWM06O8AMAatNn7lmo= github.com/agext/levenshtein v1.2.3/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki2W0IB5558= +github.com/alecthomas/assert/v2 v2.11.0 h1:2Q9r3ki8+JYXvGsDyBXwH3LcJ+WK5D0gc5E8vS6K3D0= +github.com/alecthomas/assert/v2 v2.11.0/go.mod h1:Bze95FyfUr7x34QZrjL+XP+0qgp/zg8yS+TtBj1WA3k= +github.com/alecthomas/chroma/v2 v2.23.1 h1:nv2AVZdTyClGbVQkIzlDm/rnhk1E9bU9nXwmZ/Vk/iY= +github.com/alecthomas/chroma/v2 v2.23.1/go.mod h1:NqVhfBR0lte5Ouh3DcthuUCTUpDC9cxBOfyMbMQPs3o= +github.com/alecthomas/go-check-sumtype v0.3.1 h1:u9aUvbGINJxLVXiFvHUlPEaD7VDULsrxJb4Aq31NLkU= +github.com/alecthomas/go-check-sumtype v0.3.1/go.mod h1:A8TSiN3UPRw3laIgWEUOHHLPa6/r9MtoigdlP5h3K/E= +github.com/alecthomas/repr v0.5.2 h1:SU73FTI9D1P5UNtvseffFSGmdNci/O6RsqzeXJtP0Qs= +github.com/alecthomas/repr v0.5.2/go.mod h1:Fr0507jx4eOXV7AlPV6AVZLYrLIuIeSOWtW57eE/O/4= +github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= +github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= +github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= +github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= +github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho= +github.com/alexkohler/nakedret/v2 v2.0.6 h1:ME3Qef1/KIKr3kWX3nti3hhgNxw6aqN5pZmQiFSsuzQ= +github.com/alexkohler/nakedret/v2 v2.0.6/go.mod h1:l3RKju/IzOMQHmsEvXwkqMDzHHvurNQfAgE1eVmT40Q= +github.com/alexkohler/prealloc v1.0.2 h1:MPo8cIkGkZytq7WNH9UHv3DIX1mPz1RatPXnZb0zHWQ= +github.com/alexkohler/prealloc v1.0.2/go.mod h1:fT39Jge3bQrfA7nPMDngUfvUbQGQeJyGQnR+913SCig= +github.com/alfatraining/structtag v1.0.0 h1:2qmcUqNcCoyVJ0up879K614L9PazjBSFruTB0GOFjCc= +github.com/alfatraining/structtag v1.0.0/go.mod h1:p3Xi5SwzTi+Ryj64DqjLWz7XurHxbGsq6y3ubePJPus= +github.com/alingse/asasalint v0.0.11 h1:SFwnQXJ49Kx/1GghOFz1XGqHYKp21Kq1nHad/0WQRnw= +github.com/alingse/asasalint v0.0.11/go.mod h1:nCaoMhw7a9kSJObvQyVzNTPBDbNpdocqrSP7t/cW5+I= +github.com/alingse/nilnesserr v0.2.0 h1:raLem5KG7EFVb4UIDAXgrv3N2JIaffeKNtcEXkEWd/w= +github.com/alingse/nilnesserr v0.2.0/go.mod h1:1xJPrXonEtX7wyTq8Dytns5P2hNzoWymVUIaKm4HNFg= github.com/apparentlymart/go-textseg/v12 v12.0.0/go.mod h1:S/4uRK2UtaQttw1GenVJEynmyUenKwP++x/+DdGV/Ec= github.com/apparentlymart/go-textseg/v15 v15.0.0 h1:uYvfpb3DyLSCGWnctWKGj857c6ew1u1fNQOlOtuGxQY= github.com/apparentlymart/go-textseg/v15 v15.0.0/go.mod h1:K8XmNZdhEBkdlyDdvbmmsvpAG721bKi0joRfFdHIWJ4= +github.com/armon/go-radix v1.0.0 h1:F4z6KzEeeQIMeLFa97iZU6vupzoecKdU5TX24SNppXI= +github.com/armon/go-radix v1.0.0/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= +github.com/ashanbrown/forbidigo/v2 v2.3.0 h1:OZZDOchCgsX5gvToVtEBoV2UWbFfI6RKQTir2UZzSxo= +github.com/ashanbrown/forbidigo/v2 v2.3.0/go.mod h1:5p6VmsG5/1xx3E785W9fouMxIOkvY2rRV9nMdWadd6c= +github.com/ashanbrown/makezero/v2 v2.1.0 h1:snuKYMbqosNokUKm+R6/+vOPs8yVAi46La7Ck6QYSaE= +github.com/ashanbrown/makezero/v2 v2.1.0/go.mod h1:aEGT/9q3S8DHeE57C88z2a6xydvgx8J5hgXIGWgo0MY= +github.com/aymanbagabas/go-osc52/v2 v2.0.1 h1:HwpRHbFMcZLEVr42D4p7XBqjyuxQH5SMiErDT4WkJ2k= +github.com/aymanbagabas/go-osc52/v2 v2.0.1/go.mod h1:uYgXzlJ7ZpABp8OJ+exZzJJhRNQ2ASbcXHWsFqH8hp8= +github.com/bahlo/generic-list-go v0.2.0 h1:5sz/EEAK+ls5wF+NeqDpk5+iNdMDXrh3z3nPnH1Wvgk= +github.com/bahlo/generic-list-go v0.2.0/go.mod h1:2KvAjgMlE5NNynlg/5iLrrCCZ2+5xWbdbCW3pNTGyYg= +github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= +github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8= +github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= +github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= +github.com/bgentry/speakeasy v0.1.0 h1:ByYyxL9InA1OWqxJqqp2A5pYHUrCiAL6K3J+LKSsQkY= +github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs= +github.com/bkielbasa/cyclop v1.2.3 h1:faIVMIGDIANuGPWH031CZJTi2ymOQBULs9H21HSMa5w= +github.com/bkielbasa/cyclop v1.2.3/go.mod h1:kHTwA9Q0uZqOADdupvcFJQtp/ksSnytRMe8ztxG8Fuo= +github.com/blizzy78/varnamelen v0.8.0 h1:oqSblyuQvFsW1hbBHh1zfwrKe3kcSj0rnXkKzsQ089M= +github.com/blizzy78/varnamelen v0.8.0/go.mod h1:V9TzQZ4fLJ1DSrjVDfl89H7aMnTvKkApdHeyESmyR7k= +github.com/bmatcuk/doublestar/v4 v4.9.1 h1:X8jg9rRZmJd4yRy7ZeNDRnM+T3ZfHv15JiBJ/avrEXE= +github.com/bmatcuk/doublestar/v4 v4.9.1/go.mod h1:xBQ8jztBU6kakFMg+8WGxn0c6z1fTSPVIjEY1Wr7jzc= +github.com/bombsimon/wsl/v4 v4.7.0 h1:1Ilm9JBPRczjyUs6hvOPKvd7VL1Q++PL8M0SXBDf+jQ= +github.com/bombsimon/wsl/v4 v4.7.0/go.mod h1:uV/+6BkffuzSAVYD+yGyld1AChO7/EuLrCF/8xTiapg= +github.com/bombsimon/wsl/v5 v5.6.0 h1:4z+/sBqC5vUmSp1O0mS+czxwH9+LKXtCWtHH9rZGQL8= +github.com/bombsimon/wsl/v5 v5.6.0/go.mod h1:Uqt2EfrMj2NV8UGoN1f1Y3m0NpUVCsUdrNCdet+8LvU= +github.com/breml/bidichk v0.3.3 h1:WSM67ztRusf1sMoqH6/c4OBCUlRVTKq+CbSeo0R17sE= +github.com/breml/bidichk v0.3.3/go.mod h1:ISbsut8OnjB367j5NseXEGGgO/th206dVa427kR8YTE= +github.com/breml/errchkjson v0.4.1 h1:keFSS8D7A2T0haP9kzZTi7o26r7kE3vymjZNeNDRDwg= +github.com/breml/errchkjson v0.4.1/go.mod h1:a23OvR6Qvcl7DG/Z4o0el6BRAjKnaReoPQFciAl9U3s= github.com/bufbuild/protocompile v0.14.1 h1:iA73zAf/fyljNjQKwYzUHD6AD4R8KMasmwa/FBatYVw= github.com/bufbuild/protocompile v0.14.1/go.mod h1:ppVdAIhbr2H8asPk6k4pY7t9zB1OU5DoEw9xY/FUi1c= +github.com/buger/jsonparser v1.1.1 h1:2PnMjfWD7wBILjqQbt530v576A/cAbQvEW9gGIpYMUs= +github.com/buger/jsonparser v1.1.1/go.mod h1:6RYKKt7H4d4+iWqouImQ9R2FZql3VbhNgx27UK13J/0= +github.com/butuzov/ireturn v0.4.0 h1:+s76bF/PfeKEdbG8b54aCocxXmi0wvYdOVsWxVO7n8E= +github.com/butuzov/ireturn v0.4.0/go.mod h1:ghI0FrCmap8pDWZwfPisFD1vEc56VKH4NpQUxDHta70= +github.com/butuzov/mirror v1.3.0 h1:HdWCXzmwlQHdVhwvsfBb2Au0r3HyINry3bDWLYXiKoc= +github.com/butuzov/mirror v1.3.0/go.mod h1:AEij0Z8YMALaq4yQj9CPPVYOyJQyiexpQEQgihajRfI= +github.com/catenacyber/perfsprint v0.10.1 h1:u7Riei30bk46XsG8nknMhKLXG9BcXz3+3tl/WpKm0PQ= +github.com/catenacyber/perfsprint v0.10.1/go.mod h1:DJTGsi/Zufpuus6XPGJyKOTMELe347o6akPvWG9Zcsc= +github.com/ccojocar/zxcvbn-go v1.0.4 h1:FWnCIRMXPj43ukfX000kvBZvV6raSxakYr1nzyNrUcc= +github.com/ccojocar/zxcvbn-go v1.0.4/go.mod h1:3GxGX+rHmueTUMvm5ium7irpyjmm7ikxYFOSJB21Das= +github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= +github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/cespare/xxhash/v2 v2.1.2/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs= github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/charithe/durationcheck v0.0.11 h1:g1/EX1eIiKS57NTWsYtHDZ/APfeXKhye1DidBcABctk= +github.com/charithe/durationcheck v0.0.11/go.mod h1:x5iZaixRNl8ctbM+3B2RrPG5t856TxRyVQEnbIEM2X4= +github.com/charmbracelet/colorprofile v0.2.3-0.20250311203215-f60798e515dc h1:4pZI35227imm7yK2bGPcfpFEmuY1gc2YSTShr4iJBfs= +github.com/charmbracelet/colorprofile v0.2.3-0.20250311203215-f60798e515dc/go.mod h1:X4/0JoqgTIPSFcRA/P6INZzIuyqdFY5rm8tb41s9okk= +github.com/charmbracelet/lipgloss v1.1.0 h1:vYXsiLHVkK7fp74RkV7b2kq9+zDLoEU4MZoFqR/noCY= +github.com/charmbracelet/lipgloss v1.1.0/go.mod h1:/6Q8FR2o+kj8rz4Dq0zQc3vYf7X+B0binUUBwA0aL30= +github.com/charmbracelet/x/ansi v0.10.1 h1:rL3Koar5XvX0pHGfovN03f5cxLbCF2YvLeyz7D2jVDQ= +github.com/charmbracelet/x/ansi v0.10.1/go.mod h1:3RQDQ6lDnROptfpWuUVIUG64bD2g2BgntdxH0Ya5TeE= +github.com/charmbracelet/x/cellbuf v0.0.13-0.20250311204145-2c3ea96c31dd h1:vy0GVL4jeHEwG5YOXDmi86oYw2yuYUGqz6a8sLwg0X8= +github.com/charmbracelet/x/cellbuf v0.0.13-0.20250311204145-2c3ea96c31dd/go.mod h1:xe0nKWGd3eJgtqZRaN9RjMtK7xUYchjzPr7q6kcvCCs= +github.com/charmbracelet/x/term v0.2.1 h1:AQeHeLZ1OqSXhrAWpYUtZyX1T3zVxfpZuEQMIQaGIAQ= +github.com/charmbracelet/x/term v0.2.1/go.mod h1:oQ4enTYFV7QN4m0i9mzHrViD7TQKvNEEkHUMCmsxdUg= +github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= +github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= +github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= +github.com/ckaznocha/intrange v0.3.1 h1:j1onQyXvHUsPWujDH6WIjhyH26gkRt/txNlV7LspvJs= +github.com/ckaznocha/intrange v0.3.1/go.mod h1:QVepyz1AkUoFQkpEqksSYpNpUo3c5W7nWh/s6SHIJJk= +github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= github.com/cloudflare/circl v1.6.3 h1:9GPOhQGF9MCYUeXyMYlqTR6a5gTrgR/fBLXvUgtVcg8= github.com/cloudflare/circl v1.6.3/go.mod h1:2eXP6Qfat4O/Yhh8BznvKnJ+uzEoTQ6jVKJRn81BiS4= +github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= github.com/cpuguy83/go-md2man/v2 v2.0.1/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g= -github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= +github.com/curioswitch/go-reassign v0.3.0 h1:dh3kpQHuADL3cobV/sSGETA8DOv457dwl+fbBAhrQPs= +github.com/curioswitch/go-reassign v0.3.0/go.mod h1:nApPCCTtqLJN/s8HfItCcKV0jIPwluBOvZP+dsJGA88= github.com/cyphar/filepath-securejoin v0.4.1 h1:JyxxyPEaktOD+GAnqIqTf9A8tHyAG22rowi7HkoSU1s= github.com/cyphar/filepath-securejoin v0.4.1/go.mod h1:Sdj7gXlvMcPZsbhwhQ33GguGLDGQL7h7bg04C/+u9jI= +github.com/daixiang0/gci v0.13.7 h1:+0bG5eK9vlI08J+J/NWGbWPTNiXPG4WhNLJOkSxWITQ= +github.com/daixiang0/gci v0.13.7/go.mod h1:812WVN6JLFY9S6Tv76twqmNqevN0pa3SX3nih0brVzQ= +github.com/dave/dst v0.27.3 h1:P1HPoMza3cMEquVf9kKy8yXsFirry4zEnWOdYPOoIzY= +github.com/dave/dst v0.27.3/go.mod h1:jHh6EOibnHgcUW3WjKHisiooEkYwqpHLBSX1iOBhEyc= +github.com/dave/jennifer v1.7.1 h1:B4jJJDHelWcDhlRQxWeo0Npa/pYKBLrirAQoTN45txo= +github.com/dave/jennifer v1.7.1/go.mod h1:nXbxhEmQfOZhWml3D1cDK5M1FLnMSozpbFN/m3RmGZc= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM= github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/denis-tingaikin/go-header v0.5.0 h1:SRdnP5ZKvcO9KKRP1KJrhFR3RrlGuD+42t4429eC9k8= +github.com/denis-tingaikin/go-header v0.5.0/go.mod h1:mMenU5bWrok6Wl2UsZjy+1okegmwQ3UgWl4V1D8gjlY= +github.com/dlclark/regexp2 v1.11.5 h1:Q/sSnsKerHeCkc/jSTNq1oCm7KiVgUMZRDUoRu0JQZQ= +github.com/dlclark/regexp2 v1.11.5/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8= +github.com/dprotaso/go-yit v0.0.0-20191028211022-135eb7262960/go.mod h1:9HQzr9D/0PGwMEbC3d5AB7oi67+h4TsQqItC1GVYG58= +github.com/dprotaso/go-yit v0.0.0-20220510233725-9ba8df137936 h1:PRxIJD8XjimM5aTknUK9w6DHLDox2r2M3DI4i2pnd3w= +github.com/dprotaso/go-yit v0.0.0-20220510233725-9ba8df137936/go.mod h1:ttYvX5qlB+mlV1okblJqcSMtR4c52UKxDiX9GRBS8+Q= github.com/emirpasic/gods v1.18.1 h1:FXtiHYKDGKCW2KzwZKx0iC0PQmdlorYgdFG9jPXJ1Bc= github.com/emirpasic/gods v1.18.1/go.mod h1:8tpGGwCnJ5H4r6BWwaV6OrWmMoPhUl5jm/FMNAnJvWQ= +github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= +github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= +github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= +github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= +github.com/ettle/strcase v0.2.0 h1:fGNiVF21fHXpX1niBgk0aROov1LagYsOwV/xqKDKR/Q= +github.com/ettle/strcase v0.2.0/go.mod h1:DajmHElDSaX76ITe3/VHVyMin4LWSJN5Z909Wp+ED1A= github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk= github.com/fatih/color v1.18.0 h1:S8gINlzdQ840/4pfAwic/ZE0djQEH3wM94VfqLTZcOM= github.com/fatih/color v1.18.0/go.mod h1:4FelSpRwEGDpQ12mAdzqdOukCy4u8WUtOY6lkT/6HfU= +github.com/fatih/structtag v1.2.0 h1:/OdNE99OxoI/PqaW/SuSK9uxxT3f/tcSZgon/ssNSx4= +github.com/fatih/structtag v1.2.0/go.mod h1:mBJUNpUnHmRKrKlQQlmCrh5PuhftFbNv8Ys4/aAZl94= +github.com/firefart/nonamedreturns v1.0.6 h1:vmiBcKV/3EqKY3ZiPxCINmpS431OcE1S47AQUwhrg8E= +github.com/firefart/nonamedreturns v1.0.6/go.mod h1:R8NisJnSIpvPWheCq0mNRXJok6D8h7fagJTF8EMEwCo= +github.com/frankban/quicktest v1.14.4 h1:g2rn0vABPOOXmZUj+vbmUp0lPoXEMuhTpIluN0XL9UY= +github.com/frankban/quicktest v1.14.4/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0= +github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= +github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= +github.com/fsnotify/fsnotify v1.5.4 h1:jRbGcIw6P2Meqdwuo0H1p6JVLbL5DHKAKlYndzMwVZI= +github.com/fsnotify/fsnotify v1.5.4/go.mod h1:OVB6XrOHzAwXMpEM7uPOzcehqUV2UqJxmVXmkdnm1bU= +github.com/fzipp/gocyclo v0.6.0 h1:lsblElZG7d3ALtGMx9fmxeTKZaLLpU8mET09yN4BBLo= +github.com/fzipp/gocyclo v0.6.0/go.mod h1:rXPyn8fnlpa0R2csP/31uerbiVBugk5whMdlyaLkLoA= +github.com/ghostiam/protogetter v0.3.20 h1:oW7OPFit2FxZOpmMRPP9FffU4uUpfeE/rEdE1f+MzD0= +github.com/ghostiam/protogetter v0.3.20/go.mod h1:FjIu5Yfs6FT391m+Fjp3fbAYJ6rkL/J6ySpZBfnODuI= +github.com/go-critic/go-critic v0.14.3 h1:5R1qH2iFeo4I/RJU8vTezdqs08Egi4u5p6vOESA0pog= +github.com/go-critic/go-critic v0.14.3/go.mod h1:xwntfW6SYAd7h1OqDzmN6hBX/JxsEKl5up/Y2bsxgVQ= github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 h1:+zs/tPmkDkHx3U66DAb0lQFJrpS6731Oaa12ikc+DiI= github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376/go.mod h1:an3vInlBmSxCcxctByoQdvwPiA7DTK7jaaFDBTtu0ic= github.com/go-git/go-billy/v5 v5.6.2 h1:6Q86EsPXMa7c3YZ3aLAQsMA0VlWmy43r6FHqa/UNbRM= github.com/go-git/go-billy/v5 v5.6.2/go.mod h1:rcFC2rAsp/erv7CMz9GczHcuD0D32fWzH+MJAU+jaUU= github.com/go-git/go-git/v5 v5.16.5 h1:mdkuqblwr57kVfXri5TTH+nMFLNUxIj9Z7F5ykFbw5s= github.com/go-git/go-git/v5 v5.16.5/go.mod h1:QOMLpNf1qxuSY4StA/ArOdfFR2TrKEjJiye2kel2m+M= +github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= +github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= +github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= +github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= +github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= +github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vbaY= +github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= +github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk= +github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A= github.com/go-logr/logr v1.4.3 h1:CjnDlHq8ikf6E492q6eKboGOC0T8CDaOvkHCIg8idEI= github.com/go-logr/logr v1.4.3/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= +github.com/go-quicktest/qt v1.101.0 h1:O1K29Txy5P2OK0dGo59b7b0LR6wKfIhttaAhHUyn7eI= +github.com/go-quicktest/qt v1.101.0/go.mod h1:14Bz/f7NwaXPtdYEgzsx46kqSxVwTbzVZsDC26tQJow= +github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= +github.com/go-task/slim-sprig v0.0.0-20210107165309-348f09dbbbc0 h1:p104kn46Q8WdvHunIJ9dAyjPVtrBPhSr3KT2yUst43I= +github.com/go-task/slim-sprig v0.0.0-20210107165309-348f09dbbbc0/go.mod h1:fyg7847qk6SyHyPtNmDHnmrv/HOrqktSC+C9fM+CJOE= +github.com/go-task/slim-sprig/v3 v3.0.0 h1:sUs3vkvUymDpBKi3qH1YSqBQk9+9D/8M2mN1vB6EwHI= +github.com/go-task/slim-sprig/v3 v3.0.0/go.mod h1:W848ghGpv3Qj3dhTPRyJypKRiqCdHZiAzKg9hl15HA8= github.com/go-test/deep v1.0.3 h1:ZrJSEWsXzPOxaZnFteGEfooLba+ju3FYIbOrS+rQd68= github.com/go-test/deep v1.0.3/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA= +github.com/go-toolsmith/astcast v1.1.0 h1:+JN9xZV1A+Re+95pgnMgDboWNVnIMMQXwfBwLRPgSC8= +github.com/go-toolsmith/astcast v1.1.0/go.mod h1:qdcuFWeGGS2xX5bLM/c3U9lewg7+Zu4mr+xPwZIB4ZU= +github.com/go-toolsmith/astcopy v1.1.0 h1:YGwBN0WM+ekI/6SS6+52zLDEf8Yvp3n2seZITCUBt5s= +github.com/go-toolsmith/astcopy v1.1.0/go.mod h1:hXM6gan18VA1T/daUEHCFcYiW8Ai1tIwIzHY6srfEAw= +github.com/go-toolsmith/astequal v1.0.3/go.mod h1:9Ai4UglvtR+4up+bAD4+hCj7iTo4m/OXVTSLnCyTAx4= +github.com/go-toolsmith/astequal v1.1.0/go.mod h1:sedf7VIdCL22LD8qIvv7Nn9MuWJruQA/ysswh64lffQ= +github.com/go-toolsmith/astequal v1.2.0 h1:3Fs3CYZ1k9Vo4FzFhwwewC3CHISHDnVUPC4x0bI2+Cw= +github.com/go-toolsmith/astequal v1.2.0/go.mod h1:c8NZ3+kSFtFY/8lPso4v8LuJjdJiUFVnSuU3s0qrrDY= +github.com/go-toolsmith/astfmt v1.1.0 h1:iJVPDPp6/7AaeLJEruMsBUlOYCmvg0MoCfJprsOmcco= +github.com/go-toolsmith/astfmt v1.1.0/go.mod h1:OrcLlRwu0CuiIBp/8b5PYF9ktGVZUjlNMV634mhwuQ4= +github.com/go-toolsmith/astp v1.1.0 h1:dXPuCl6u2llURjdPLLDxJeZInAeZ0/eZwFJmqZMnpQA= +github.com/go-toolsmith/astp v1.1.0/go.mod h1:0T1xFGz9hicKs8Z5MfAqSUitoUYS30pDMsRVIDHs8CA= +github.com/go-toolsmith/pkgload v1.2.2 h1:0CtmHq/02QhxcF7E9N5LIFcYFsMR5rdovfqTtRKkgIk= +github.com/go-toolsmith/pkgload v1.2.2/go.mod h1:R2hxLNRKuAsiXCo2i5J6ZQPhnPMOVtU+f0arbFPWCus= +github.com/go-toolsmith/strparse v1.0.0/go.mod h1:YI2nUKP9YGZnL/L1/DLFBfixrcjslWct4wyljWhSRy8= +github.com/go-toolsmith/strparse v1.1.0 h1:GAioeZUK9TGxnLS+qfdqNbA4z0SSm5zVNtCQiyP2Bvw= +github.com/go-toolsmith/strparse v1.1.0/go.mod h1:7ksGy58fsaQkGQlY8WVoBFNyEPMGuJin1rfoPS4lBSQ= +github.com/go-toolsmith/typep v1.1.0 h1:fIRYDyF+JywLfqzyhdiHzRop/GQDxxNhLGQ6gFUNHus= +github.com/go-toolsmith/typep v1.1.0/go.mod h1:fVIw+7zjdsMxDA3ITWnH1yOiw1rnTQKCsF/sk2H/qig= +github.com/go-viper/mapstructure/v2 v2.5.0 h1:vM5IJoUAy3d7zRSVtIwQgBj7BiWtMPfmPEgAXnvj1Ro= +github.com/go-viper/mapstructure/v2 v2.5.0/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM= +github.com/go-xmlfmt/xmlfmt v1.1.3 h1:t8Ey3Uy7jDSEisW2K3somuMKIpzktkWptA0iFCnRUWY= +github.com/go-xmlfmt/xmlfmt v1.1.3/go.mod h1:aUCEOzzezBEjDBbFBoSiya/gduyIiWYRP6CnSFIV8AM= +github.com/gobwas/glob v0.2.3 h1:A4xDbljILXROh+kObIiy5kIaPYD8e96x1tgBhUI5J+Y= +github.com/gobwas/glob v0.2.3/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJAkT8= +github.com/godoc-lint/godoc-lint v0.11.2 h1:Bp0FkJWoSdNsBikdNgIcgtaoo+xz6I/Y9s5WSBQUeeM= +github.com/godoc-lint/godoc-lint v0.11.2/go.mod h1:iVpGdL1JCikNH2gGeAn3Hh+AgN5Gx/I/cxV+91L41jo= +github.com/gofrs/flock v0.13.0 h1:95JolYOvGMqeH31+FC7D2+uULf6mG61mEZ/A8dRYMzw= +github.com/gofrs/flock v0.13.0/go.mod h1:jxeyy9R1auM5S6JYDBhDt+E2TCo7DkratH4Pgi8P+Z0= +github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= github.com/golang-jwt/jwt/v5 v5.3.1 h1:kYf81DTWFe7t+1VvL7eS+jKFVWaUnK9cB1qbwn63YCY= github.com/golang-jwt/jwt/v5 v5.3.1/go.mod h1:fxCRLWMO43lRc8nhHWY6LGqRcf+1gQWArsqaEUEa5bE= +github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= +github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 h1:f+oWsMOmNPc8JmEHVZIycC7hBoQxHH9pNKQORJNozsQ= github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8/go.mod h1:wcDNUvekVysuuOpQKo3191zZyTpiI6se1N1ULghS0sw= +github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y= +github.com/golang/mock v1.4.0/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.1/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.4/go.mod h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71h+4= github.com/golang/protobuf v1.1.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= +github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= +github.com/golang/protobuf v1.3.5/go.mod h1:6O5/vntMXwX2lRkT1hjjk0nAC1IDOTvTlVgjlRvqsdk= +github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= +github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= +github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= +github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w= +github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= +github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8= +github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= +github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek= github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps= +github.com/golangci/asciicheck v0.5.0 h1:jczN/BorERZwK8oiFBOGvlGPknhvq0bjnysTj4nUfo0= +github.com/golangci/asciicheck v0.5.0/go.mod h1:5RMNAInbNFw2krqN6ibBxN/zfRFa9S6tA1nPdM0l8qQ= +github.com/golangci/dupl v0.0.0-20250308024227-f665c8d69b32 h1:WUvBfQL6EW/40l6OmeSBYQJNSif4O11+bmWEz+C7FYw= +github.com/golangci/dupl v0.0.0-20250308024227-f665c8d69b32/go.mod h1:NUw9Zr2Sy7+HxzdjIULge71wI6yEg1lWQr7Evcu8K0E= +github.com/golangci/go-printf-func-name v0.1.1 h1:hIYTFJqAGp1iwoIfsNTpoq1xZAarogrvjO9AfiW3B4U= +github.com/golangci/go-printf-func-name v0.1.1/go.mod h1:Es64MpWEZbh0UBtTAICOZiB+miW53w/K9Or/4QogJss= +github.com/golangci/gofmt v0.0.0-20250106114630-d62b90e6713d h1:viFft9sS/dxoYY0aiOTsLKO2aZQAPT4nlQCsimGcSGE= +github.com/golangci/gofmt v0.0.0-20250106114630-d62b90e6713d/go.mod h1:ivJ9QDg0XucIkmwhzCDsqcnxxlDStoTl89jDMIoNxKY= +github.com/golangci/golangci-lint/v2 v2.10.1 h1:flhw5Px6ojbLyEFzXvJn5B2HEdkkRlkhE1SnmCbQBiE= +github.com/golangci/golangci-lint/v2 v2.10.1/go.mod h1:dBsrOk6zj0vDhlTv+IiJGqkDokR24IVTS7W3EVfPTQY= +github.com/golangci/golines v0.15.0 h1:Qnph25g8Y1c5fdo1X7GaRDGgnMHgnxh4Gk4VfPTtRx0= +github.com/golangci/golines v0.15.0/go.mod h1:AZjXd23tbHMpowhtnGlj9KCNsysj72aeZVVHnVcZx10= +github.com/golangci/misspell v0.8.0 h1:qvxQhiE2/5z+BVRo1kwYA8yGz+lOlu5Jfvtx2b04Jbg= +github.com/golangci/misspell v0.8.0/go.mod h1:WZyyI2P3hxPY2UVHs3cS8YcllAeyfquQcKfdeE9AFVg= +github.com/golangci/plugin-module-register v0.1.2 h1:e5WM6PO6NIAEcij3B053CohVp3HIYbzSuP53UAYgOpg= +github.com/golangci/plugin-module-register v0.1.2/go.mod h1:1+QGTsKBvAIvPvoY/os+G5eoqxWn70HYDm2uvUyGuVw= +github.com/golangci/revgrep v0.8.0 h1:EZBctwbVd0aMeRnNUsFogoyayvKHyxlV3CdUA46FX2s= +github.com/golangci/revgrep v0.8.0/go.mod h1:U4R/s9dlXZsg8uJmaR1GrloUr14D7qDl8gi2iPXJH8k= +github.com/golangci/swaggoswag v0.0.0-20250504205917-77f2aca3143e h1:ai0EfmVYE2bRA5htgAG9r7s3tHsfjIhN98WshBTJ9jM= +github.com/golangci/swaggoswag v0.0.0-20250504205917-77f2aca3143e/go.mod h1:Vrn4B5oR9qRwM+f54koyeH3yzphlecwERs0el27Fr/s= +github.com/golangci/unconvert v0.0.0-20250410112200-a129a6e6413e h1:gD6P7NEo7Eqtt0ssnqSJNNndxe69DOQ24A5h7+i3KpM= +github.com/golangci/unconvert v0.0.0-20250410112200-a129a6e6413e/go.mod h1:h+wZwLjUTJnm/P2rwlbJdRPZXOzaT36/FwnPnY2inzc= +github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= +github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.4.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.8/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= +github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= +github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= +github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= +github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= +github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= +github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200229191704-1ebb73c60ed3/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20210407192527-94a9f03dee38/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20260115054156-294ebfa9ad83 h1:z2ogiKUYzX5Is6zr/vP9vJGqPwcdqsWjOt+V8J7+bTc= +github.com/google/pprof v0.0.0-20260115054156-294ebfa9ad83/go.mod h1:MxpfABSjhmINe3F1It9d+8exIHFvUqtLIRCdOGNXqiI= +github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= +github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= +github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= +github.com/gordonklaus/ineffassign v0.2.0 h1:Uths4KnmwxNJNzq87fwQQDDnbNb7De00VOk9Nu0TySs= +github.com/gordonklaus/ineffassign v0.2.0/go.mod h1:TIpymnagPSexySzs7F9FnO1XFTy8IT3a59vmZp5Y9Lw= +github.com/gostaticanalysis/analysisutil v0.7.1 h1:ZMCjoue3DtDWQ5WyU16YbjbQEQ3VuzwxALrpYd+HeKk= +github.com/gostaticanalysis/analysisutil v0.7.1/go.mod h1:v21E3hY37WKMGSnbsw2S/ojApNWb6C1//mXO48CXbVc= +github.com/gostaticanalysis/comment v1.4.2/go.mod h1:KLUTGDv6HOCotCH8h2erHKmpci2ZoR8VPu34YA2uzdM= +github.com/gostaticanalysis/comment v1.5.0 h1:X82FLl+TswsUMpMh17srGRuKaaXprTaytmEpgnKIDu8= +github.com/gostaticanalysis/comment v1.5.0/go.mod h1:V6eb3gpCv9GNVqb6amXzEUX3jXLVK/AdA+IrAMSqvEc= +github.com/gostaticanalysis/forcetypeassert v0.2.0 h1:uSnWrrUEYDr86OCxWa4/Tp2jeYDlogZiZHzGkWFefTk= +github.com/gostaticanalysis/forcetypeassert v0.2.0/go.mod h1:M5iPavzE9pPqWyeiVXSFghQjljW1+l/Uke3PXHS6ILY= +github.com/gostaticanalysis/nilerr v0.1.2 h1:S6nk8a9N8g062nsx63kUkF6AzbHGw7zzyHMcpu52xQU= +github.com/gostaticanalysis/nilerr v0.1.2/go.mod h1:A19UHhoY3y8ahoL7YKz6sdjDtduwTSI4CsymaC2htPA= +github.com/gostaticanalysis/testutil v0.3.1-0.20210208050101-bfb5c8eec0e4/go.mod h1:D+FIZ+7OahH3ePw/izIEeH5I06eKs1IKI4Xr64/Am3M= +github.com/gostaticanalysis/testutil v0.5.0 h1:Dq4wT1DdTwTGCQQv3rl3IvD5Ld0E6HiY+3Zh0sUGqw8= +github.com/gostaticanalysis/testutil v0.5.0/go.mod h1:OLQSbuM6zw2EvCcXTz1lVq5unyoNft372msDY0nY5Hs= +github.com/hashicorp/cli v1.1.7 h1:/fZJ+hNdwfTSfsxMBa9WWMlfjUZbX8/LnUxgAd7lCVU= +github.com/hashicorp/cli v1.1.7/go.mod h1:e6Mfpga9OCT1vqzFuoGZiiF/KaG9CbUfO5s3ghU3YgU= github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I= github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= @@ -70,6 +406,9 @@ github.com/hashicorp/go-cty v1.5.0 h1:EkQ/v+dDNUqnuVpmS5fPqyY71NXVgT5gf32+57xY8g github.com/hashicorp/go-cty v1.5.0/go.mod h1:lFUCG5kd8exDobgSfyj4ONE/dc822kiYMguVKdHGMLM= github.com/hashicorp/go-hclog v1.6.3 h1:Qr2kF+eVWjTiYmU7Y31tYlP1h0q/X3Nl3tPGdaB11/k= github.com/hashicorp/go-hclog v1.6.3/go.mod h1:W4Qnvbt70Wk/zYJryRzDRU/4r0kIg0PVHBcfoyhpF5M= +github.com/hashicorp/go-immutable-radix/v2 v2.1.0 h1:CUW5RYIcysz+D3B+l1mDeXrQ7fUvGGCwJfdASSzbrfo= +github.com/hashicorp/go-immutable-radix/v2 v2.1.0/go.mod h1:hgdqLXA4f6NIjRVisM1TJ9aOJVNRqKZj+xDGF6m7PBw= +github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk= github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo= github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= github.com/hashicorp/go-plugin v1.7.0 h1:YghfQH/0QmPNc/AZMTFE3ac8fipZyZECHdDPshfk+mA= @@ -79,10 +418,17 @@ github.com/hashicorp/go-retryablehttp v0.7.8/go.mod h1:rjiScheydd+CxvumBsIrFKlx3 github.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= github.com/hashicorp/go-uuid v1.0.3 h1:2gKiV6YVmrJ1i2CKKa9obLvRieoRGviZFL26PcT/Co8= github.com/hashicorp/go-uuid v1.0.3/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= +github.com/hashicorp/go-version v1.2.1/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= github.com/hashicorp/go-version v1.8.0 h1:KAkNb1HAiZd1ukkxDFGmokVZe1Xy9HG6NUp+bPle2i4= github.com/hashicorp/go-version v1.8.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= +github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/golang-lru/v2 v2.0.7 h1:a+bsQ5rvGLjzHuww6tVxozPZFVghXaHOwFs4luLUK2k= +github.com/hashicorp/golang-lru/v2 v2.0.7/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM= github.com/hashicorp/hc-install v0.9.3 h1:1H4dgmgzxEVwT6E/d/vIL5ORGVKz9twRwDw+qA5Hyho= github.com/hashicorp/hc-install v0.9.3/go.mod h1:FQlQ5I3I/X409N/J1U4pPeQQz1R3BoV0IysB7aiaQE0= +github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= +github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= github.com/hashicorp/hcl/v2 v2.24.0 h1:2QJdZ454DSsYGoaE6QheQZjtKZSUs9Nh2izTWiwQxvE= github.com/hashicorp/hcl/v2 v2.24.0/go.mod h1:oGoO1FIQYfn/AgyOhlg9qLC6/nOJPX3qGbkZpYAcqfM= github.com/hashicorp/logutils v1.0.0 h1:dLEQVugN8vlakKOUE3ihGLTZJRB4j+M2cdTm/ORI65Y= @@ -91,6 +437,14 @@ github.com/hashicorp/terraform-exec v0.25.0 h1:Bkt6m3VkJqYh+laFMrWIpy9KHYFITpOyz github.com/hashicorp/terraform-exec v0.25.0/go.mod h1:dl9IwsCfklDU6I4wq9/StFDp7dNbH/h5AnfS1RmiUl8= github.com/hashicorp/terraform-json v0.27.2 h1:BwGuzM6iUPqf9JYM/Z4AF1OJ5VVJEEzoKST/tRDBJKU= github.com/hashicorp/terraform-json v0.27.2/go.mod h1:GzPLJ1PLdUG5xL6xn1OXWIjteQRT2CNT9o/6A9mi9hE= +github.com/hashicorp/terraform-plugin-codegen-framework v0.4.1 h1:eaI/3dsu2T5QAXbA+7N+B+UBj20GdtYnsRuYypKh3S4= +github.com/hashicorp/terraform-plugin-codegen-framework v0.4.1/go.mod h1:kpYM23L7NtcfaQdWAN0QFkV/lU0w16qJ2ddAPCI4zAg= +github.com/hashicorp/terraform-plugin-codegen-openapi v0.3.0 h1:IKpc337XKk50QyQPSxLrHwdqSo1E2XqCMxFkWsZcTvc= +github.com/hashicorp/terraform-plugin-codegen-openapi v0.3.0/go.mod h1:tT6wl80h7nsMBw+1yZRgJXi+Ys85PUai11weDqysvp4= +github.com/hashicorp/terraform-plugin-codegen-spec v0.2.0 h1:91dQG1A/DxP6vRz9GiytDTrZTXDbhHPvmpYnAyWA/Vw= +github.com/hashicorp/terraform-plugin-codegen-spec v0.2.0/go.mod h1:fywrEKpordQypmAjz/HIfm2LuNVmyJ6KDe8XT9GdJxQ= +github.com/hashicorp/terraform-plugin-docs v0.24.0 h1:YNZYd+8cpYclQyXbl1EEngbld8w7/LPOm99GD5nikIU= +github.com/hashicorp/terraform-plugin-docs v0.24.0/go.mod h1:YLg+7LEwVmRuJc0EuCw0SPLxuQXw5mW8iJ5ml/kvi+o= github.com/hashicorp/terraform-plugin-framework v1.17.0 h1:JdX50CFrYcYFY31gkmitAEAzLKoBgsK+iaJjDC8OexY= github.com/hashicorp/terraform-plugin-framework v1.17.0/go.mod h1:4OUXKdHNosX+ys6rLgVlgklfxN3WHR5VHSOABeS/BM0= github.com/hashicorp/terraform-plugin-framework-validators v0.19.0 h1:Zz3iGgzxe/1XBkooZCewS0nJAaCFPFPHdNJd8FgE4Ow= @@ -109,8 +463,19 @@ github.com/hashicorp/terraform-svchost v0.2.0 h1:wVc2vMiodOHvNZcQw/3y9af1XSomgjG github.com/hashicorp/terraform-svchost v0.2.0/go.mod h1:/98rrS2yZsbppi4VGVCjwYmh8dqsKzISqK7Hli+0rcQ= github.com/hashicorp/yamux v0.1.2 h1:XtB8kyFOyHXYVFnwT5C3+Bdo8gArse7j2AQ0DA0Uey8= github.com/hashicorp/yamux v0.1.2/go.mod h1:C+zze2n6e/7wshOZep2A70/aQU6QBRWJO/G6FT1wIns= +github.com/hexops/gotextdiff v1.0.3 h1:gitA9+qJrrTCsiCl7+kh75nPqQt1cx4ZkudSTLoUqJM= +github.com/hexops/gotextdiff v1.0.3/go.mod h1:pSWU5MAI3yDq+fZBTazCSJysOMbxWL1BSow5/V2vxeg= +github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= +github.com/huandu/xstrings v1.3.3/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE= +github.com/huandu/xstrings v1.4.0 h1:D17IlohoQq4UcpqD7fDk80P7l+lwAmlFaBHgOipl2FU= +github.com/huandu/xstrings v1.4.0/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE= github.com/iancoleman/strcase v0.3.0 h1:nTXanmYxhfFAMjZL34Ov6gkzEsSJZ5DbhxWjvSASxEI= github.com/iancoleman/strcase v0.3.0/go.mod h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47ZCWhYzw7ho= +github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= +github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= +github.com/imdario/mergo v0.3.11/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA= +github.com/imdario/mergo v0.3.16 h1:wwQJbIsHYGMUyLSPrEq1CT16AhnhNJQ51+4fdHUnCl4= +github.com/imdario/mergo v0.3.16/go.mod h1:WBLT9ZmE3lPoWsEzCh9LPo3TiwVN+ZKEjmz+hD27ysY= github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= @@ -120,12 +485,40 @@ github.com/jarcoal/httpmock v1.4.1 h1:0Ju+VCFuARfFlhVXFc2HxlcQkfB+Xq12/EotHko+x2 github.com/jarcoal/httpmock v1.4.1/go.mod h1:ftW1xULwo+j0R0JJkJIIi7UKigZUXCLLanykgjwBXL0= github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 h1:BQSFePA1RWJOlocH6Fxy8MmwDt+yVQYULKfN0RoTN8A= github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99/go.mod h1:1lJo3i6rXxKeerYnT8Nvf0QmHCRC1n8sfWVwXF2Frvo= +github.com/jgautheron/goconst v1.8.2 h1:y0XF7X8CikZ93fSNT6WBTb/NElBu9IjaY7CCYQrCMX4= +github.com/jgautheron/goconst v1.8.2/go.mod h1:A0oxgBCHy55NQn6sYpO7UdnA9p+h7cPtoOZUmvNIako= github.com/jhump/protoreflect v1.17.0 h1:qOEr613fac2lOuTgWN4tPAtLL7fUSbuJL5X5XumQh94= github.com/jhump/protoreflect v1.17.0/go.mod h1:h9+vUUL38jiBzck8ck+6G/aeMX8Z4QUY/NiJPwPNi+8= +github.com/jingyugao/rowserrcheck v1.1.1 h1:zibz55j/MJtLsjP1OF4bSdgXxwL1b+Vn7Tjzq7gFzUs= +github.com/jingyugao/rowserrcheck v1.1.1/go.mod h1:4yvlZSDb3IyDTUZJUmpZfm2Hwok+Dtp+nu2qOq+er9c= +github.com/jjti/go-spancheck v0.6.5 h1:lmi7pKxa37oKYIMScialXUK6hP3iY5F1gu+mLBPgYB8= +github.com/jjti/go-spancheck v0.6.5/go.mod h1:aEogkeatBrbYsyW6y5TgDfihCulDYciL1B7rG2vSsrU= github.com/joho/godotenv v1.5.1 h1:7eLL/+HRGLY0ldzfGMeQkb7vMd0as4CfYvUVzLqw0N0= github.com/joho/godotenv v1.5.1/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4= +github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y= +github.com/jpillora/backoff v1.0.0/go.mod h1:J/6gKK9jxlEcS3zixgDgUAsiuZ7yrSoa/FX5e0EB2j4= +github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= +github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= +github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= +github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= +github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= +github.com/julienschmidt/httprouter v1.3.0/go.mod h1:JR6WtHb+2LUe8TCKY3cZOxFyyO8IZAc4RVcycCCAKdM= +github.com/julz/importas v0.2.0 h1:y+MJN/UdL63QbFJHws9BVC5RpA2iq0kpjrFajTGivjQ= +github.com/julz/importas v0.2.0/go.mod h1:pThlt589EnCYtMnmhmRYY/qn9lCf/frPOK+WMx3xiJY= +github.com/karamaru-alpha/copyloopvar v1.2.2 h1:yfNQvP9YaGQR7VaWLYcfZUlRP2eo2vhExWKxD/fP6q0= +github.com/karamaru-alpha/copyloopvar v1.2.2/go.mod h1:oY4rGZqZ879JkJMtX3RRkcXRkmUvH0x35ykgaKgsgJY= github.com/kevinburke/ssh_config v1.2.0 h1:x584FjTGwHzMwvHx18PXxbBVzfnxogHaAReU4gf13a4= github.com/kevinburke/ssh_config v1.2.0/go.mod h1:CT57kijsi8u/K/BOFA39wgDQJ9CxiF4nAY/ojJ6r6mM= +github.com/kisielk/errcheck v1.9.0 h1:9xt1zI9EBfcYBvdU1nVrzMzzUPUtPKs9bVSIM3TAb3M= +github.com/kisielk/errcheck v1.9.0/go.mod h1:kQxWMMVZgIkDq7U8xtG/n2juOjbLgZtedi0D+/VL/i8= +github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/kkHAIKE/contextcheck v1.1.6 h1:7HIyRcnyzxL9Lz06NGhiKvenXq7Zw6Q0UQu/ttjfJCE= +github.com/kkHAIKE/contextcheck v1.1.6/go.mod h1:3dDbMRNBFaq8HFXWC1JyvDSPm43CmE6IuHam8Wr0rkg= +github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= @@ -133,8 +526,48 @@ github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/kulti/thelper v0.7.1 h1:fI8QITAoFVLx+y+vSyuLBP+rcVIB8jKooNSCT2EiI98= +github.com/kulti/thelper v0.7.1/go.mod h1:NsMjfQEy6sd+9Kfw8kCP61W1I0nerGSYSFnGaxQkcbs= +github.com/kunwardeep/paralleltest v1.0.15 h1:ZMk4Qt306tHIgKISHWFJAO1IDQJLc6uDyJMLyncOb6w= +github.com/kunwardeep/paralleltest v1.0.15/go.mod h1:di4moFqtfz3ToSKxhNjhOZL+696QtJGCFe132CbBLGk= +github.com/lasiar/canonicalheader v1.1.2 h1:vZ5uqwvDbyJCnMhmFYimgMZnJMjwljN5VGY0VKbMXb4= +github.com/lasiar/canonicalheader v1.1.2/go.mod h1:qJCeLFS0G/QlLQ506T+Fk/fWMa2VmBUiEI2cuMK4djI= +github.com/ldez/exptostd v0.4.5 h1:kv2ZGUVI6VwRfp/+bcQ6Nbx0ghFWcGIKInkG/oFn1aQ= +github.com/ldez/exptostd v0.4.5/go.mod h1:QRjHRMXJrCTIm9WxVNH6VW7oN7KrGSht69bIRwvdFsM= github.com/ldez/go-git-cmd-wrapper/v2 v2.9.1 h1:QJRB9Gs5i/h6TVJI6yl09Qm6rNooznRiKwIw+VIxd90= github.com/ldez/go-git-cmd-wrapper/v2 v2.9.1/go.mod h1:0eUeas7XtKDPKQbB0KijfaMPbuQ/cIprtoTRiwaUoFg= +github.com/ldez/gomoddirectives v0.8.0 h1:JqIuTtgvFC2RdH1s357vrE23WJF2cpDCPFgA/TWDGpk= +github.com/ldez/gomoddirectives v0.8.0/go.mod h1:jutzamvZR4XYJLr0d5Honycp4Gy6GEg2mS9+2YX3F1Q= +github.com/ldez/grignotin v0.10.1 h1:keYi9rYsgbvqAZGI1liek5c+jv9UUjbvdj3Tbn5fn4o= +github.com/ldez/grignotin v0.10.1/go.mod h1:UlDbXFCARrXbWGNGP3S5vsysNXAPhnSuBufpTEbwOas= +github.com/ldez/structtags v0.6.1 h1:bUooFLbXx41tW8SvkfwfFkkjPYvFFs59AAMgVg6DUBk= +github.com/ldez/structtags v0.6.1/go.mod h1:YDxVSgDy/MON6ariaxLF2X09bh19qL7MtGBN5MrvbdY= +github.com/ldez/tagliatelle v0.7.2 h1:KuOlL70/fu9paxuxbeqlicJnCspCRjH0x8FW+NfgYUk= +github.com/ldez/tagliatelle v0.7.2/go.mod h1:PtGgm163ZplJfZMZ2sf5nhUT170rSuPgBimoyYtdaSI= +github.com/ldez/usetesting v0.5.0 h1:3/QtzZObBKLy1F4F8jLuKJiKBjjVFi1IavpoWbmqLwc= +github.com/ldez/usetesting v0.5.0/go.mod h1:Spnb4Qppf8JTuRgblLrEWb7IE6rDmUpGvxY3iRrzvDQ= +github.com/leonklingele/grouper v1.1.2 h1:o1ARBDLOmmasUaNDesWqWCIFH3u7hoFlM84YrjT3mIY= +github.com/leonklingele/grouper v1.1.2/go.mod h1:6D0M/HVkhs2yRKRFZUoGjeDy7EZTfFBE9gl4kjmIGkA= +github.com/lucasb-eyer/go-colorful v1.2.0 h1:1nnpGOrhyZZuNyfu1QjKiUICQ74+3FNCN69Aj6K7nkY= +github.com/lucasb-eyer/go-colorful v1.2.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0= +github.com/macabu/inamedparam v0.2.0 h1:VyPYpOc10nkhI2qeNUdh3Zket4fcZjEWe35poddBCpE= +github.com/macabu/inamedparam v0.2.0/go.mod h1:+Pee9/YfGe5LJ62pYXqB89lJ+0k5bsR8Wgz/C0Zlq3U= +github.com/magiconair/properties v1.8.6 h1:5ibWZ6iY0NctNGWo87LalDlEZ6R41TqbbDamhfG/Qzo= +github.com/magiconair/properties v1.8.6/go.mod h1:y3VJvCyxH9uVvJTWEGAELF3aiYNyPKd5NZ3oSwXrF60= +github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0= +github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= +github.com/manuelarte/embeddedstructfieldcheck v0.4.0 h1:3mAIyaGRtjK6EO9E73JlXLtiy7ha80b2ZVGyacxgfww= +github.com/manuelarte/embeddedstructfieldcheck v0.4.0/go.mod h1:z8dFSyXqp+fC6NLDSljRJeNQJJDWnY7RoWFzV3PC6UM= +github.com/manuelarte/funcorder v0.5.0 h1:llMuHXXbg7tD0i/LNw8vGnkDTHFpTnWqKPI85Rknc+8= +github.com/manuelarte/funcorder v0.5.0/go.mod h1:Yt3CiUQthSBMBxjShjdXMexmzpP8YGvGLjrxJNkO2hA= +github.com/maratori/testableexamples v1.0.1 h1:HfOQXs+XgfeRBJ+Wz0XfH+FHnoY9TVqL6Fcevpzy4q8= +github.com/maratori/testableexamples v1.0.1/go.mod h1:XE2F/nQs7B9N08JgyRmdGjYVGqxWwClLPCGSQhXQSrQ= +github.com/maratori/testpackage v1.1.2 h1:ffDSh+AgqluCLMXhM19f/cpvQAKygKAJXFl9aUjmbqs= +github.com/maratori/testpackage v1.1.2/go.mod h1:8F24GdVDFW5Ew43Et02jamrVMNXLUNaOynhDssITGfc= +github.com/matoous/godox v1.1.0 h1:W5mqwbyWrwZv6OQ5Z1a/DHGMOvXYCBP3+Ht7KMoJhq4= +github.com/matoous/godox v1.1.0/go.mod h1:jgE/3fUXiTurkdHOLT5WEkThTSuE7yxHv5iWPa80afs= +github.com/matryer/is v1.4.0 h1:sosSmIWwkYITGrxZ25ULNDeKiMNzFSr4V/eqBQP0PeE= +github.com/matryer/is v1.4.0/go.mod h1:8I/i5uYgLzgsgEloJE1U6xx5HkBQpAZvepWuujKwMRU= github.com/mattn/go-colorable v0.1.9/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= github.com/mattn/go-colorable v0.1.12/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4= github.com/mattn/go-colorable v0.1.14 h1:9A9LHSqF/7dyVVX6g0U9cwm9pG3kP9gSzcuIPHPsaIE= @@ -143,49 +576,227 @@ github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Ky github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/mattn/go-runewidth v0.0.16 h1:E5ScNMtiwvlvB5paMFdw9p4kSQzbXFikJ5SQO6TULQc= +github.com/mattn/go-runewidth v0.0.16/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w= +github.com/matttproud/golang_protobuf_extensions v1.0.1 h1:4hp9jkHxhMHkqkrB3Ix0jegS5sx/RkqARlsWZ6pIwiU= +github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= github.com/maxatome/go-testdeep v1.14.0 h1:rRlLv1+kI8eOI3OaBXZwb3O7xY3exRzdW5QyX48g9wI= github.com/maxatome/go-testdeep v1.14.0/go.mod h1:lPZc/HAcJMP92l7yI6TRz1aZN5URwUBUAfUNvrclaNM= +github.com/mgechev/revive v1.14.0 h1:CC2Ulb3kV7JFYt+izwORoS3VT/+Plb8BvslI/l1yZsc= +github.com/mgechev/revive v1.14.0/go.mod h1:MvnujelCZBZCaoDv5B3foPo6WWgULSSFxvfxp7GsPfo= +github.com/mitchellh/copystructure v1.0.0/go.mod h1:SNtv71yrdKgLRyLFxmLdkAbkKEFWgYaq1OVrnRcwhnw= github.com/mitchellh/copystructure v1.2.0 h1:vpKXTN4ewci03Vljg/q9QvCGUDttBOGBIa15WveJJGw= github.com/mitchellh/copystructure v1.2.0/go.mod h1:qLl+cE2AmVv+CoeAwDPye/v+N2HKCj9FbZEVFJRxO9s= +github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y= +github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= github.com/mitchellh/go-testing-interface v1.14.1 h1:jrgshOhYAUVNMAJiKbEu7EqAwgJJ2JqpQmpLJOu07cU= github.com/mitchellh/go-testing-interface v1.14.1/go.mod h1:gfgS7OtZj6MA4U1UrDRp04twqAjfvlZyCfX3sDjEym8= github.com/mitchellh/go-wordwrap v1.0.1 h1:TLuKupo69TCn6TQSyGxwI1EblZZEsQ0vMlAFQflz0v0= github.com/mitchellh/go-wordwrap v1.0.1/go.mod h1:R62XHJLzvMFRBbcrT7m7WgmE1eOyTSsCt+hzestvNj0= github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= +github.com/mitchellh/reflectwalk v1.0.0/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= github.com/mitchellh/reflectwalk v1.0.2 h1:G2LzWKi524PWgd3mLHV8Y5k7s6XUvT0Gef6zxSIeXaQ= github.com/mitchellh/reflectwalk v1.0.2/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= +github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= +github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= +github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= +github.com/moricho/tparallel v0.3.2 h1:odr8aZVFA3NZrNybggMkYO3rgPRcqjeQUlBBFVxKHTI= +github.com/moricho/tparallel v0.3.2/go.mod h1:OQ+K3b4Ln3l2TZveGCywybl68glfLEwFGqvnjok8b+U= +github.com/muesli/termenv v0.16.0 h1:S5AlUN9dENB57rsbnkPyfdGuWIlkmzJjbFf0Tf5FWUc= +github.com/muesli/termenv v0.16.0/go.mod h1:ZRfOIKPFDYQoDFF4Olj7/QJbW60Ol/kL1pU3VfY/Cnk= +github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= +github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= +github.com/nakabonne/nestif v0.3.1 h1:wm28nZjhQY5HyYPx+weN3Q65k6ilSBxDb8v5S81B81U= +github.com/nakabonne/nestif v0.3.1/go.mod h1:9EtoZochLn5iUprVDmDjqGKPofoUEBL8U4Ngq6aY7OE= +github.com/nishanths/exhaustive v0.12.0 h1:vIY9sALmw6T/yxiASewa4TQcFsVYZQQRUQJhKRf3Swg= +github.com/nishanths/exhaustive v0.12.0/go.mod h1:mEZ95wPIZW+x8kC4TgC+9YCUgiST7ecevsVDTgc2obs= +github.com/nishanths/predeclared v0.2.2 h1:V2EPdZPliZymNAn79T8RkNApBjMmVKh5XRpLm/w98Vk= +github.com/nishanths/predeclared v0.2.2/go.mod h1:RROzoN6TnGQupbC+lqggsOlcgysk3LMK/HI84Mp280c= +github.com/nunnatsa/ginkgolinter v0.23.0 h1:x3o4DGYOWbBMP/VdNQKgSj+25aJKx2Pe6lHr8gBcgf8= +github.com/nunnatsa/ginkgolinter v0.23.0/go.mod h1:9qN1+0akwXEccwV1CAcCDfcoBlWXHB+ML9884pL4SZ4= +github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A= +github.com/nxadm/tail v1.4.8 h1:nPr65rt6Y5JFSKQO7qToXr7pePgD6Gwiw05lkbyAQTE= +github.com/nxadm/tail v1.4.8/go.mod h1:+ncqLTQzXmGhMZNUePPaPqPvBxHAIsmXswZKocGu+AU= github.com/oklog/run v1.2.0 h1:O8x3yXwah4A73hJdlrwo/2X6J62gE5qTMusH0dvz60E= github.com/oklog/run v1.2.0/go.mod h1:mgDbKRSwPhJfesJ4PntqFUbKQRZ50NgmZTSPlFA0YFk= +github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= +github.com/onsi/ginkgo v1.10.2/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= +github.com/onsi/ginkgo v1.12.1/go.mod h1:zj2OWP4+oCPe1qIXoGWkgMRwljMUYCdkwsT2108oapk= +github.com/onsi/ginkgo v1.16.4 h1:29JGrr5oVBm5ulCWet69zQkzWipVXIol6ygQUe/EzNc= +github.com/onsi/ginkgo v1.16.4/go.mod h1:dX+/inL/fNMqNlz0e9LfyB9TswhZpCVdJM/Z6Vvnwo0= +github.com/onsi/ginkgo/v2 v2.1.3/go.mod h1:vw5CSIxN1JObi/U8gcbwft7ZxR2dgaR70JSE3/PpL4c= +github.com/onsi/ginkgo/v2 v2.28.1 h1:S4hj+HbZp40fNKuLUQOYLDgZLwNUVn19N3Atb98NCyI= +github.com/onsi/ginkgo/v2 v2.28.1/go.mod h1:CLtbVInNckU3/+gC8LzkGUb9oF+e8W8TdUsxPwvdOgE= +github.com/onsi/gomega v1.7.0/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY= +github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY= +github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo= +github.com/onsi/gomega v1.17.0/go.mod h1:HnhC7FXeEQY45zxNK3PPoIUhzk/80Xly9PcubAlGdZY= +github.com/onsi/gomega v1.19.0/go.mod h1:LY+I3pBVzYsTBU1AnDwOSxaYi9WoWiqgwooUqq9yPro= +github.com/onsi/gomega v1.39.1 h1:1IJLAad4zjPn2PsnhH70V4DKRFlrCzGBNrNaru+Vf28= +github.com/onsi/gomega v1.39.1/go.mod h1:hL6yVALoTOxeWudERyfppUcZXjMwIMLnuSfruD2lcfg= +github.com/otiai10/copy v1.2.0/go.mod h1:rrF5dJ5F0t/EWSYODDu4j9/vEeYHMkc8jt0zJChqQWw= +github.com/otiai10/copy v1.14.0 h1:dCI/t1iTdYGtkvCuBG2BgR6KZa83PTclw4U5n2wAllU= +github.com/otiai10/copy v1.14.0/go.mod h1:ECfuL02W+/FkTWZWgQqXPWZgW9oeKCSQ5qVfSc4qc4w= +github.com/otiai10/curr v0.0.0-20150429015615-9b4961190c95/go.mod h1:9qAhocn7zKJG+0mI8eUu6xqkFDYS2kb2saOteoSB3cE= +github.com/otiai10/curr v1.0.0/go.mod h1:LskTG5wDwr8Rs+nNQ+1LlxRjAtTZZjtJW4rMXl6j4vs= +github.com/otiai10/mint v1.3.0/go.mod h1:F5AjcsTsWUqX+Na9fpHb52P8pcRX2CI6A3ctIT91xUo= +github.com/otiai10/mint v1.3.1/go.mod h1:/yxELlJQ0ufhjUwhshSj+wFjZ78CnZ48/1wtmBH1OTc= +github.com/pb33f/libopenapi v0.15.0 h1:AoBYIY3HXqDDF8O9kcudlqWaRFZZJmgtueE649oHzIw= +github.com/pb33f/libopenapi v0.15.0/go.mod h1:m+4Pwri31UvcnZjuP8M7TlbR906DXJmMvYsbis234xg= +github.com/pelletier/go-toml v1.9.5 h1:4yBQzkHv+7BHq2PQUZF3Mx0IYxG7LsP222s7Agd3ve8= +github.com/pelletier/go-toml v1.9.5/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= +github.com/pelletier/go-toml/v2 v2.2.4 h1:mye9XuhQ6gvn5h28+VilKrrPoQVanw5PMw/TB0t5Ec4= +github.com/pelletier/go-toml/v2 v2.2.4/go.mod h1:2gIqNv+qfxSVS7cM2xJQKtLSTLUE9V8t9Stt+h56mCY= github.com/pjbgf/sha1cd v0.3.2 h1:a9wb0bp1oC2TGwStyn0Umc/IGKQnEgF0vVaZ8QF8eo4= github.com/pjbgf/sha1cd v0.3.2/go.mod h1:zQWigSxVmsHEZow5qaLtPYxpcKMMQpa09ixqBxuCS6A= +github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U= github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/posener/complete v1.2.3 h1:NP0eAhjcjImqslEwo/1hq7gpajME0fTLTezBKDqfXqo= +github.com/posener/complete v1.2.3/go.mod h1:WZIdtGGp+qx0sLrYKtIRAruyNpv6hFCicSgv7Sy7s/s= +github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= +github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo= +github.com/prometheus/client_golang v1.7.1/go.mod h1:PY5Wy2awLA44sXw4AOSfFBetzPP4j5+D6mVACh+pe2M= +github.com/prometheus/client_golang v1.11.0/go.mod h1:Z6t4BnS23TR94PD6BsDNk8yVqroYurpAkEiz0P2BEV0= +github.com/prometheus/client_golang v1.12.1 h1:ZiaPsmm9uiBeaSMRznKsCDNtPCS0T3JVDGF+06gjBzk= +github.com/prometheus/client_golang v1.12.1/go.mod h1:3Z9XVyYiZYEO+YQWt3RD2R3jrbd179Rt297l4aS6nDY= +github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= +github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/client_model v0.2.0 h1:uq5h0d+GuxiXLJLNABMgp2qUWDPiLvgCzz2dUR+/W/M= +github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= +github.com/prometheus/common v0.10.0/go.mod h1:Tlit/dnDKsSWFlCLTWaA1cyBgKHSMdTB80sz/V91rCo= +github.com/prometheus/common v0.26.0/go.mod h1:M7rCNAaPfAosfx8veZJCuw84e35h3Cfd9VFqTh1DIvc= +github.com/prometheus/common v0.32.1 h1:hWIdL3N2HoUx3B8j3YN9mWor0qhY/NlEKZEaXxuIRh4= +github.com/prometheus/common v0.32.1/go.mod h1:vu+V0TpY+O6vW9J44gczi3Ap/oXXR10b+M/gUGO4Hls= +github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= +github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= +github.com/prometheus/procfs v0.1.3/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU= +github.com/prometheus/procfs v0.6.0/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA= +github.com/prometheus/procfs v0.7.3 h1:4jVXhlkAyzOScmCkXBTOLRLTz8EeU+eyjrwB/EPq0VU= +github.com/prometheus/procfs v0.7.3/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA= +github.com/quasilyte/go-ruleguard v0.4.5 h1:AGY0tiOT5hJX9BTdx/xBdoCubQUAE2grkqY2lSwvZcA= +github.com/quasilyte/go-ruleguard v0.4.5/go.mod h1:Vl05zJ538vcEEwu16V/Hdu7IYZWyKSwIy4c88Ro1kRE= +github.com/quasilyte/go-ruleguard/dsl v0.3.23 h1:lxjt5B6ZCiBeeNO8/oQsegE6fLeCzuMRoVWSkXC4uvY= +github.com/quasilyte/go-ruleguard/dsl v0.3.23/go.mod h1:KeCP03KrjuSO0H1kTuZQCWlQPulDV6YMIXmpQss17rU= +github.com/quasilyte/gogrep v0.5.0 h1:eTKODPXbI8ffJMN+W2aE0+oL0z/nh8/5eNdiO34SOAo= +github.com/quasilyte/gogrep v0.5.0/go.mod h1:Cm9lpz9NZjEoL1tgZ2OgeUKPIxL1meE7eo60Z6Sk+Ng= +github.com/quasilyte/regex/syntax v0.0.0-20210819130434-b3f0c404a727 h1:TCg2WBOl980XxGFEZSS6KlBGIV0diGdySzxATTWoqaU= +github.com/quasilyte/regex/syntax v0.0.0-20210819130434-b3f0c404a727/go.mod h1:rlzQ04UMyJXu/aOvhd8qT+hvDrFpiwqp8MRXDY9szc0= +github.com/quasilyte/stdinfo v0.0.0-20220114132959-f7386bf02567 h1:M8mH9eK4OUR4lu7Gd+PU1fV2/qnDNfzT635KRSObncs= +github.com/quasilyte/stdinfo v0.0.0-20220114132959-f7386bf02567/go.mod h1:DWNGW8A4Y+GyBgPuaQJuWiy0XYftx4Xm/y5Jqk9I6VQ= +github.com/raeperd/recvcheck v0.2.0 h1:GnU+NsbiCqdC2XX5+vMZzP+jAJC5fht7rcVTAhX74UI= +github.com/raeperd/recvcheck v0.2.0/go.mod h1:n04eYkwIR0JbgD73wT8wL4JjPC3wm0nFtzBnWNocnYU= +github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= +github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ= +github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88= +github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ= github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc= github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/ryancurrah/gomodguard v1.4.1 h1:eWC8eUMNZ/wM/PWuZBv7JxxqT5fiIKSIyTvjb7Elr+g= +github.com/ryancurrah/gomodguard v1.4.1/go.mod h1:qnMJwV1hX9m+YJseXEBhd2s90+1Xn6x9dLz11ualI1I= +github.com/ryanrolds/sqlclosecheck v0.5.1 h1:dibWW826u0P8jNLsLN+En7+RqWWTYrjCB9fJfSfdyCU= +github.com/ryanrolds/sqlclosecheck v0.5.1/go.mod h1:2g3dUjoS6AL4huFdv6wn55WpLIDjY7ZgUR4J8HOO/XQ= +github.com/sanposhiho/wastedassign/v2 v2.1.0 h1:crurBF7fJKIORrV85u9UUpePDYGWnwvv3+A96WvwXT0= +github.com/sanposhiho/wastedassign/v2 v2.1.0/go.mod h1:+oSmSC+9bQ+VUAxA66nBb0Z7N8CK7mscKTDYC6aIek4= +github.com/santhosh-tekuri/jsonschema/v6 v6.0.2 h1:KRzFb2m7YtdldCEkzs6KqmJw4nqEVZGK7IN2kJkjTuQ= +github.com/santhosh-tekuri/jsonschema/v6 v6.0.2/go.mod h1:JXeL+ps8p7/KNMjDQk3TCwPpBy0wYklyWTfbkIzdIFU= +github.com/sashamelentyev/interfacebloat v1.1.0 h1:xdRdJp0irL086OyW1H/RTZTr1h/tMEOsumirXcOJqAw= +github.com/sashamelentyev/interfacebloat v1.1.0/go.mod h1:+Y9yU5YdTkrNvoX0xHc84dxiN1iBi9+G8zZIhPVoNjQ= +github.com/sashamelentyev/usestdlibvars v1.29.0 h1:8J0MoRrw4/NAXtjQqTHrbW9NN+3iMf7Knkq057v4XOQ= +github.com/sashamelentyev/usestdlibvars v1.29.0/go.mod h1:8PpnjHMk5VdeWlVb4wCdrB8PNbLqZ3wBZTZWkrpZZL8= +github.com/securego/gosec/v2 v2.23.0 h1:h4TtF64qFzvnkqvsHC/knT7YC5fqyOCItlVR8+ptEBo= +github.com/securego/gosec/v2 v2.23.0/go.mod h1:qRHEgXLFuYUDkI2T7W7NJAmOkxVhkR0x9xyHOIcMNZ0= +github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM= github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 h1:n661drycOFuPLCN3Uc8sB6B/s6Z4t2xvBgU1htSHuq8= github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3/go.mod h1:A0bzQcvG0E7Rwjx0REVgAGH58e96+X0MeOfepqsbeW4= +github.com/shopspring/decimal v1.2.0/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= +github.com/shopspring/decimal v1.3.1 h1:2Usl1nmF/WZucqkFZhnfFYxxxu8LG21F6nPQBE5gKV8= +github.com/shopspring/decimal v1.3.1/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= +github.com/shurcooL/go v0.0.0-20180423040247-9e1955d9fb6e/go.mod h1:TDJrrUr11Vxrven61rcy3hJMUqaf/CLWYhHNPmT14Lk= +github.com/shurcooL/go-goon v0.0.0-20170922171312-37c2f522c041/go.mod h1:N5mDOmsrJOB+vfqUK+7DmDyjhSLIIBnXo9lvZJj3MWQ= +github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= +github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= +github.com/sirupsen/logrus v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88= +github.com/sirupsen/logrus v1.9.4 h1:TsZE7l11zFCLZnZ+teH4Umoq5BhEIfIzfRDZ1Uzql2w= +github.com/sirupsen/logrus v1.9.4/go.mod h1:ftWc9WdOfJ0a92nsE2jF5u5ZwH8Bv2zdeOC42RjbV2g= +github.com/sivchari/containedctx v1.0.3 h1:x+etemjbsh2fB5ewm5FeLNi5bUjK0V8n0RB+Wwfd0XE= +github.com/sivchari/containedctx v1.0.3/go.mod h1:c1RDvCbnJLtH4lLcYD/GqwiBSSf4F5Qk0xld2rBqzJ4= github.com/skeema/knownhosts v1.3.1 h1:X2osQ+RAjK76shCbvhHHHVl3ZlgDm8apHEHFqRjnBY8= github.com/skeema/knownhosts v1.3.1/go.mod h1:r7KTdC8l4uxWRyK2TpQZ/1o5HaSzh06ePQNxPwTcfiY= +github.com/sonatard/noctx v0.4.0 h1:7MC/5Gg4SQ4lhLYR6mvOP6mQVSxCrdyiExo7atBs27o= +github.com/sonatard/noctx v0.4.0/go.mod h1:64XdbzFb18XL4LporKXp8poqZtPKbCrqQ402CV+kJas= +github.com/sourcegraph/go-diff v0.7.0 h1:9uLlrd5T46OXs5qpp8L/MTltk0zikUGi0sNNyCpA8G0= +github.com/sourcegraph/go-diff v0.7.0/go.mod h1:iBszgVvyxdc8SFZ7gm69go2KDdt3ag071iBaWPF6cjs= +github.com/spf13/afero v1.15.0 h1:b/YBCLWAJdFWJTN9cLhiXXcD7mzKn9Dm86dNnfyQw1I= +github.com/spf13/afero v1.15.0/go.mod h1:NC2ByUVxtQs4b3sIUphxK0NioZnmxgyCrfzeuq8lxMg= +github.com/spf13/cast v1.3.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= +github.com/spf13/cast v1.5.1 h1:R+kOtfhWQE6TVQzY+4D7wJLBgkdVasCEFxSUBYBYIlA= +github.com/spf13/cast v1.5.1/go.mod h1:b9PdjNptOpzXr7Rq1q9gJML/2cdGQAo69NKzQ10KN48= github.com/spf13/cobra v1.4.0/go.mod h1:Wo4iy3BUC+X2Fybo0PDqwJIv3dNRiZLHQymsfxlB84g= github.com/spf13/cobra v1.10.2 h1:DMTTonx5m65Ic0GOoRY2c16WCbHxOOw6xxezuLaBpcU= github.com/spf13/cobra v1.10.2/go.mod h1:7C1pvHqHw5A4vrJfjNwvOdzYu0Gml16OCs2GRiTUUS4= +github.com/spf13/jwalterweatherman v1.1.0 h1:ue6voC5bR5F8YxI5S67j9i582FU4Qvo2bmqnqMYADFk= +github.com/spf13/jwalterweatherman v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0EXowPYD95IqWIGo= github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= github.com/spf13/pflag v1.0.9/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= github.com/spf13/pflag v1.0.10 h1:4EBh2KAYBwaONj6b2Ye1GiHfwjqyROoF4RwYO+vPwFk= github.com/spf13/pflag v1.0.10/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +github.com/spf13/viper v1.12.0 h1:CZ7eSOd3kZoaYDLbXnmzgQI5RlciuXBMA+18HwHRfZQ= +github.com/spf13/viper v1.12.0/go.mod h1:b6COn30jlNxbm/V2IqWiNWkJ+vZNiMNksliPCiuKtSI= +github.com/ssgreg/nlreturn/v2 v2.2.1 h1:X4XDI7jstt3ySqGU86YGAURbxw3oTDPK9sPEi6YEwQ0= +github.com/ssgreg/nlreturn/v2 v2.2.1/go.mod h1:E/iiPB78hV7Szg2YfRgyIrk1AD6JVMTRkkxBiELzh2I= github.com/stackitcloud/stackit-sdk-go/core v0.21.1 h1:Y/PcAgM7DPYMNqum0MLv4n1mF9ieuevzcCIZYQfm3Ts= github.com/stackitcloud/stackit-sdk-go/core v0.21.1/go.mod h1:osMglDby4csGZ5sIfhNyYq1bS1TxIdPY88+skE/kkmI= github.com/stackitcloud/stackit-sdk-go/services/iaasalpha v0.1.23-alpha h1:ugpMOMUZGB0yXsWcfe97F7GCdjlexbjFuGD8ZeyMSts= github.com/stackitcloud/stackit-sdk-go/services/iaasalpha v0.1.23-alpha/go.mod h1:v5VGvTxLcCdJJmblbhqYalt/MFHcElDfYoy15CMhaWs= +github.com/stbenjam/no-sprintf-host-port v0.3.1 h1:AyX7+dxI4IdLBPtDbsGAyqiTSLpCP9hWRrXQDU4Cm/g= +github.com/stbenjam/no-sprintf-host-port v0.3.1/go.mod h1:ODbZesTCHMVKthBHskvUUexdcNHAQRXk9NpSsL8p/HQ= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY= +github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= +github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= +github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= github.com/stretchr/testify v1.7.2/go.mod h1:R6va5+xMeoiuVRoj+gSkQ7d3FALtqAAGI1FQKckRals= github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U= github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U= +github.com/subosito/gotenv v1.4.1 h1:jyEFiXpy21Wm81FBN71l9VoMMV8H8jG+qIK3GCpY6Qs= +github.com/subosito/gotenv v1.4.1/go.mod h1:ayKnFf/c6rvx/2iiLrJUk1e6plDbT3edrFNGqEflhK0= github.com/teambition/rrule-go v1.8.2 h1:lIjpjvWTj9fFUZCmuoVDrKVOtdiyzbzc93qTmRVe/J8= github.com/teambition/rrule-go v1.8.2/go.mod h1:Ieq5AbrKGciP1V//Wq8ktsTXwSwJHDD5mD/wLBGl3p4= +github.com/tenntenn/modver v1.0.1 h1:2klLppGhDgzJrScMpkj9Ujy3rXPUspSjAcev9tSEBgA= +github.com/tenntenn/modver v1.0.1/go.mod h1:bePIyQPb7UeioSRkw3Q0XeMhYZSMx9B8ePqg6SAMGH0= +github.com/tenntenn/text/transform v0.0.0-20200319021203-7eef512accb3 h1:f+jULpRQGxTSkNYKJ51yaw6ChIqO+Je8UqsTKN/cDag= +github.com/tenntenn/text/transform v0.0.0-20200319021203-7eef512accb3/go.mod h1:ON8b8w4BN/kE1EOhwT0o+d62W65a6aPw1nouo9LMgyY= +github.com/tetafro/godot v1.5.4 h1:u1ww+gqpRLiIA16yF2PV1CV1n/X3zhyezbNXC3E14Sg= +github.com/tetafro/godot v1.5.4/go.mod h1:eOkMrVQurDui411nBY2FA05EYH01r14LuWY/NrVDVcU= +github.com/timakin/bodyclose v0.0.0-20241222091800-1db5c5ca4d67 h1:9LPGD+jzxMlnk5r6+hJnar67cgpDIz/iyD+rfl5r2Vk= +github.com/timakin/bodyclose v0.0.0-20241222091800-1db5c5ca4d67/go.mod h1:mkjARE7Yr8qU23YcGMSALbIxTQ9r9QBVahQOBRfU460= +github.com/timonwong/loggercheck v0.11.0 h1:jdaMpYBl+Uq9mWPXv1r8jc5fC3gyXx4/WGwTnnNKn4M= +github.com/timonwong/loggercheck v0.11.0/go.mod h1:HEAWU8djynujaAVX7QI65Myb8qgfcZ1uKbdpg3ZzKl8= +github.com/tomarrell/wrapcheck/v2 v2.12.0 h1:H/qQ1aNWz/eeIhxKAFvkfIA+N7YDvq6TWVFL27Of9is= +github.com/tomarrell/wrapcheck/v2 v2.12.0/go.mod h1:AQhQuZd0p7b6rfW+vUwHm5OMCGgp63moQ9Qr/0BpIWo= +github.com/tommy-muehle/go-mnd/v2 v2.5.1 h1:NowYhSdyE/1zwK9QCLeRb6USWdoif80Ie+v+yU8u1Zw= +github.com/tommy-muehle/go-mnd/v2 v2.5.1/go.mod h1:WsUAkMJMYww6l/ufffCD3m+P7LEvr8TnZn9lwVDlgzw= +github.com/ultraware/funlen v0.2.0 h1:gCHmCn+d2/1SemTdYMiKLAHFYxTYz7z9VIDRaTGyLkI= +github.com/ultraware/funlen v0.2.0/go.mod h1:ZE0q4TsJ8T1SQcjmkhN/w+MceuatI6pBFSxxyteHIJA= +github.com/ultraware/whitespace v0.2.0 h1:TYowo2m9Nfj1baEQBjuHzvMRbp19i+RCcRYrSWoFa+g= +github.com/ultraware/whitespace v0.2.0/go.mod h1:XcP1RLD81eV4BW8UhQlpaR+SDc2givTvyI8a586WjW8= +github.com/uudashr/gocognit v1.2.0 h1:3BU9aMr1xbhPlvJLSydKwdLN3tEUUrzPSSM8S4hDYRA= +github.com/uudashr/gocognit v1.2.0/go.mod h1:k/DdKPI6XBZO1q7HgoV2juESI2/Ofj9AcHPZhBBdrTU= +github.com/uudashr/iface v1.4.1 h1:J16Xl1wyNX9ofhpHmQ9h9gk5rnv2A6lX/2+APLTo0zU= +github.com/uudashr/iface v1.4.1/go.mod h1:pbeBPlbuU2qkNDn0mmfrxP2X+wjPMIQAy+r1MBXSXtg= github.com/vmihailenco/msgpack v3.3.3+incompatible/go.mod h1:fy3FlTQTDXWkZ7Bh6AcGMlsjHatGryHQYUTf1ShIgkk= github.com/vmihailenco/msgpack v4.0.4+incompatible h1:dSLoQfGFAo3F6OoNhwUmLwVgaUXK79GlxNBwueZn0xI= github.com/vmihailenco/msgpack v4.0.4+incompatible/go.mod h1:fy3FlTQTDXWkZ7Bh6AcGMlsjHatGryHQYUTf1ShIgkk= @@ -193,13 +804,63 @@ github.com/vmihailenco/msgpack/v5 v5.4.1 h1:cQriyiUvjTwOHg8QZaPihLWeRAAVoCpE00IU github.com/vmihailenco/msgpack/v5 v5.4.1/go.mod h1:GaZTsDaehaPpQVyxrf5mtQlH+pc21PIudVV/E3rRQok= github.com/vmihailenco/tagparser/v2 v2.0.0 h1:y09buUbR+b5aycVFQs/g70pqKVZNBmxwAhO7/IwNM9g= github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV6mEfg5OIWRZA9qds= +github.com/vmware-labs/yaml-jsonpath v0.3.2 h1:/5QKeCBGdsInyDCyVNLbXyilb61MXGi9NP674f9Hobk= +github.com/vmware-labs/yaml-jsonpath v0.3.2/go.mod h1:U6whw1z03QyqgWdgXxvVnQ90zN1BWz5V+51Ewf8k+rQ= +github.com/wk8/go-ordered-map/v2 v2.1.8 h1:5h/BUHu93oj4gIdvHHHGsScSTMijfx5PeYkE/fJgbpc= +github.com/wk8/go-ordered-map/v2 v2.1.8/go.mod h1:5nJHM5DyteebpVlHnWMV0rPz6Zp7+xBAnxjb1X5vnTw= github.com/xanzy/ssh-agent v0.3.3 h1:+/15pJfg/RsTxqYcX6fHqOXZwwMP+2VyYWJeWM2qQFM= github.com/xanzy/ssh-agent v0.3.3/go.mod h1:6dzNDKs0J9rVPHPhaGCukekBHKqfl+L3KghI1Bc68Uw= +github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= +github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb h1:zGWFAtiMcyryUHoUjUJX0/lt1H2+i2Ka2n+D3DImSNo= +github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= +github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 h1:EzJWgHovont7NscjpAxXsDA8S8BMYve8Y5+7cuRE7R0= +github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ= +github.com/xeipuuv/gojsonschema v1.2.0 h1:LhYJRs+L4fBtjZUfuSZIKGeVu0QRy8e5Xi7D17UxZ74= +github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y= +github.com/xen0n/gosmopolitan v1.3.0 h1:zAZI1zefvo7gcpbCOrPSHJZJYA9ZgLfJqtKzZ5pHqQM= +github.com/xen0n/gosmopolitan v1.3.0/go.mod h1:rckfr5T6o4lBtM1ga7mLGKZmLxswUoH1zxHgNXOsEt4= +github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e h1:JVG44RsyaB9T2KIHavMF/ppJZNG9ZpyihvCd0w101no= +github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e/go.mod h1:RbqR21r5mrJuqunuUZ/Dhy/avygyECGrLceyNeo4LiM= +github.com/yagipy/maintidx v1.0.0 h1:h5NvIsCz+nRDapQ0exNv4aJ0yXSI0420omVANTv3GJM= +github.com/yagipy/maintidx v1.0.0/go.mod h1:0qNf/I/CCZXSMhsRsrEPDZ+DkekpKLXAJfsTACwgXLk= +github.com/yeya24/promlinter v0.3.0 h1:JVDbMp08lVCP7Y6NP3qHroGAO6z2yGKQtS5JsjqtoFs= +github.com/yeya24/promlinter v0.3.0/go.mod h1:cDfJQQYv9uYciW60QT0eeHlFodotkYZlL+YcPQN+mW4= +github.com/ykadowak/zerologlint v0.1.5 h1:Gy/fMz1dFQN9JZTPjv1hxEk+sRWm05row04Yoolgdiw= +github.com/ykadowak/zerologlint v0.1.5/go.mod h1:KaUskqF3e/v59oPmdq1U1DnKcuHokl2/K1U4pmIELKg= +github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= +github.com/yuin/goldmark v1.4.1/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= +github.com/yuin/goldmark v1.7.7 h1:5m9rrB1sW3JUMToKFQfb+FGt1U7r57IHu5GrYrG2nqU= +github.com/yuin/goldmark v1.7.7/go.mod h1:uzxRWxtg69N339t3louHJ7+O03ezfj6PlliRlaOzY1E= +github.com/yuin/goldmark-meta v1.1.0 h1:pWw+JLHGZe8Rk0EGsMVssiNb/AaPMHfSRszZeUeiOUc= +github.com/yuin/goldmark-meta v1.1.0/go.mod h1:U4spWENafuA7Zyg+Lj5RqK/MF+ovMYtBvXi1lBb2VP0= github.com/zclconf/go-cty v1.17.0 h1:seZvECve6XX4tmnvRzWtJNHdscMtYEx5R7bnnVyd/d0= github.com/zclconf/go-cty v1.17.0/go.mod h1:wqFzcImaLTI6A5HfsRwB0nj5n0MRZFwmey8YoFPPs3U= github.com/zclconf/go-cty-debug v0.0.0-20240509010212-0d6042c53940 h1:4r45xpDWB6ZMSMNJFMOjqrGHynW3DIBuR2H9j0ug+Mo= github.com/zclconf/go-cty-debug v0.0.0-20240509010212-0d6042c53940/go.mod h1:CmBdvvj3nqzfzJ6nTCIwDTPZ56aVGvDrmztiO5g3qrM= +gitlab.com/bosi/decorder v0.4.2 h1:qbQaV3zgwnBZ4zPMhGLW4KZe7A7NwxEhJx39R3shffo= +gitlab.com/bosi/decorder v0.4.2/go.mod h1:muuhHoaJkA9QLcYHq4Mj8FJUwDZ+EirSHRiaTcTf6T8= +go-simpler.org/assert v0.9.0 h1:PfpmcSvL7yAnWyChSjOz6Sp6m9j5lyK8Ok9pEL31YkQ= +go-simpler.org/assert v0.9.0/go.mod h1:74Eqh5eI6vCK6Y5l3PI8ZYFXG4Sa+tkr70OIPJAUr28= +go-simpler.org/musttag v0.14.0 h1:XGySZATqQYSEV3/YTy+iX+aofbZZllJaqwFWs+RTtSo= +go-simpler.org/musttag v0.14.0/go.mod h1:uP8EymctQjJ4Z1kUnjX0u2l60WfUdQxCwSNKzE1JEOE= +go-simpler.org/sloglint v0.11.1 h1:xRbPepLT/MHPTCA6TS/wNfZrDzkGvCCqUv4Bdwc3H7s= +go-simpler.org/sloglint v0.11.1/go.mod h1:2PowwiCOK8mjiF+0KGifVOT8ZsCNiFzvfyJeJOIt8MQ= +go.abhg.dev/goldmark/frontmatter v0.2.0 h1:P8kPG0YkL12+aYk2yU3xHv4tcXzeVnN+gU0tJ5JnxRw= +go.abhg.dev/goldmark/frontmatter v0.2.0/go.mod h1:XqrEkZuM57djk7zrlRUB02x8I5J0px76YjkOzhB4YlU= +go.augendre.info/arangolint v0.4.0 h1:xSCZjRoS93nXazBSg5d0OGCi9APPLNMmmLrC995tR50= +go.augendre.info/arangolint v0.4.0/go.mod h1:l+f/b4plABuFISuKnTGD4RioXiCCgghv2xqst/xOvAA= +go.augendre.info/fatcontext v0.9.0 h1:Gt5jGD4Zcj8CDMVzjOJITlSb9cEch54hjRRlN3qDojE= +go.augendre.info/fatcontext v0.9.0/go.mod h1:L94brOAT1OOUNue6ph/2HnwxoNlds9aXDF2FcUntbNw= +go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= +go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= +go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= go.opentelemetry.io/auto/sdk v1.2.1 h1:jXsnJ4Lmnqd11kwkBV2LgLoFMZKizbCi5fNZ/ipaZ64= go.opentelemetry.io/auto/sdk v1.2.1/go.mod h1:KRTj+aOaElaLi+wW1kO/DZRXwkF4C5xPbEe3ZiIhN7Y= go.opentelemetry.io/otel v1.39.0 h1:8yPrr/S0ND9QEfTfdP9V+SiwT4E0G7Y5MO7p85nis48= @@ -212,76 +873,414 @@ go.opentelemetry.io/otel/sdk/metric v1.39.0 h1:cXMVVFVgsIf2YL6QkRF4Urbr/aMInf+2W go.opentelemetry.io/otel/sdk/metric v1.39.0/go.mod h1:xq9HEVH7qeX69/JnwEfp6fVq5wosJsY1mt4lLfYdVew= go.opentelemetry.io/otel/trace v1.39.0 h1:2d2vfpEDmCJ5zVYz7ijaJdOF59xLomrvj7bjt6/qCJI= go.opentelemetry.io/otel/trace v1.39.0/go.mod h1:88w4/PnZSazkGzz/w84VHpQafiU4EtqqlVdxWy+rNOA= +go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto= +go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE= +go.uber.org/multierr v1.10.0 h1:S0h4aNzvfcFsC3dRF1jLoaov7oRaKqRGC/pUEJ2yvPQ= +go.uber.org/multierr v1.10.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y= +go.uber.org/zap v1.27.0 h1:aJMhYGrd5QSmlpLMr2MftRKl7t8J8PTZPA732ud/XR8= +go.uber.org/zap v1.27.0/go.mod h1:GB2qFLM7cTU87MWRP2mPIjqfIDnGu+VIO4V/SdhGo2E= +go.yaml.in/yaml/v3 v3.0.4 h1:tfq32ie2Jv2UxXFdLJdh3jXuOzWiL1fo0bu/FbuKpbc= go.yaml.in/yaml/v3 v3.0.4/go.mod h1:DhzuOOF2ATzADvBadXxruRBLzYTpT36CKvDb3+aBEFg= +golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.3.0/go.mod h1:hebNnKkNXi2UzZN1eVRvBB7co0a+JxK6XbPiWVs/3J4= +golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc= +golang.org/x/crypto v0.14.0/go.mod h1:MVFd36DqK4CsrnJYDkBA3VC4m2GkXAM0PvzMCn4JQf4= golang.org/x/crypto v0.48.0 h1:/VRzVqiRSggnhY7gNRxPauEQ5Drw9haKdM0jqfcCFts= golang.org/x/crypto v0.48.0/go.mod h1:r0kV5h3qnFPlQnBSrULhlsRfryS2pmewsg+XfMgkVos= +golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= +golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek= +golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY= +golang.org/x/exp v0.0.0-20191129062945-2f5052295587/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= +golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= +golang.org/x/exp v0.0.0-20250620022241-b7579e27df2b h1:M2rDM6z3Fhozi9O7NWsxAkg/yqS/lQJ6PmkyIV3YP+o= +golang.org/x/exp v0.0.0-20250620022241-b7579e27df2b/go.mod h1:3//PLf8L/X+8b4vuAfHzxeRUl04Adcb341+IGKfnqS8= +golang.org/x/exp/typeparams v0.0.0-20220428152302-39d4317da171/go.mod h1:AbB0pIl9nAr9wVwH+Z2ZpaocVmF5I4GyWCDIsVjR0bk= +golang.org/x/exp/typeparams v0.0.0-20230203172020-98cc5a0785f9/go.mod h1:AbB0pIl9nAr9wVwH+Z2ZpaocVmF5I4GyWCDIsVjR0bk= +golang.org/x/exp/typeparams v0.0.0-20260209203927-2842357ff358 h1:qWFG1Dj7TBjOjOvhEOkmyGPVoquqUKnIU0lEVLp8xyk= +golang.org/x/exp/typeparams v0.0.0-20260209203927-2842357ff358/go.mod h1:4Mzdyp/6jzw9auFDJ3OMF5qksa7UvPnzKqTVGcb04ms= +golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= +golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= +golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= +golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= +golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= +golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190909230951-414d861bb4ac/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs= +golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE= +golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o= +golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= +golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY= +golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= +golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= +golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.6.0-dev.0.20220106191415-9b9b3d81d5e3/go.mod h1:3p9vT2HGsQu2K1YbXdKPJLVgG5VJdoTa1poYQBtP1AY= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= +golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/mod v0.13.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= golang.org/x/mod v0.33.0 h1:tHFzIWbBifEmbwtGz65eaWyGiGZatSrT9prnU8DbVL8= golang.org/x/mod v0.33.0/go.mod h1:swjeQEj+6r7fODbD2cqrnje9PnziFuw4bmLbBZFrQ5w= +golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= +golang.org/x/net v0.0.0-20190613194153-d28f0bde5980/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200222125558-5a598a2470a0/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200520004742-59133d7f0dd7/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= +golang.org/x/net v0.0.0-20210428140749-89ef3d95e781/go.mod h1:OJAsFXCWl8Ukc7SiCT/9KSuxbyM7479/AVlXFRxuMCk= +golang.org/x/net v0.0.0-20210525063256-abc453219eb5/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20211015210444-4f30a5c0130f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20220225172249-27dd8689420f/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= +golang.org/x/net v0.2.0/go.mod h1:KqCZLdyyvdV855qA2rE3GC2aiw5xGR5TEjj8smXukLY= +golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= +golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= +golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk= +golang.org/x/net v0.16.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE= golang.org/x/net v0.50.0 h1:ucWh9eiCGyDR3vtzso0WMQinm2Dnt8cFMuQa9K33J60= golang.org/x/net v0.50.0/go.mod h1:UgoSli3F/pBgdJBHCTc+tp3gmrU4XswgGRgtnwWTfyM= +golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= +golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20210514164344-f6687ab2804c/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= +golang.org/x/sync v0.4.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= golang.org/x/sync v0.19.0 h1:vV+1eWNmZ5geRlYjzm2adRgW2/mcpevXNg50YZtPCE4= golang.org/x/sync v0.19.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI= +golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190904154756-749cb33beabd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191120155948-bd437916bb0e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200106162015-b016eb3dc98e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200331124033-c3d80250170d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200501052902-10377860bb8e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200615200032-f1bc736245b1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200625212154-ddb9806d33ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210112080510-489259a85091/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210603081109-ebe580a85c40/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211019181941-9d821ace8654/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211105183446-c75c47738b0c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220114195835-da31bd327af9/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220412211240-33da011f77ad/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220503163025-988cb79eb6c6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.13.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.41.0 h1:Ivj+2Cp/ylzLiEU89QhWblYnOE9zerudt9Ftecq2C6k= golang.org/x/sys v0.41.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= -golang.org/x/telemetry v0.0.0-20260213145524-e0ab670178e1 h1:QNaHp8YvpPswfDNxlCmJyeesxbGOgaKf41iT9/QrErY= -golang.org/x/telemetry v0.0.0-20260213145524-e0ab670178e1/go.mod h1:NuITXsA9cTiqnXtVk+/wrBT2Ja4X5hsfGOYRJ6kgYjs= +golang.org/x/telemetry v0.0.0-20260209163413-e7419c687ee4 h1:bTLqdHv7xrGlFbvf5/TXNxy/iUwwdkjhqQTJDjW7aj0= +golang.org/x/telemetry v0.0.0-20260209163413-e7419c687ee4/go.mod h1:g5NllXBEermZrmR51cJDQxmJUHUOfRAaNyWBM+R+548= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= +golang.org/x/term v0.2.0/go.mod h1:TVmDHMZPmdnySmBfhjOoOdhjzdE1h4u1VwSiw2l1Nuc= +golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= +golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo= +golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU= +golang.org/x/term v0.13.0/go.mod h1:LTmsnFJwVN6bCy1rVCoS+qHT1HhALEFxKncY3WNNh4U= golang.org/x/term v0.40.0 h1:36e4zGLqU4yhjlmxEaagx2KuYbJq3EwY8K943ZsHcvg= golang.org/x/term v0.40.0/go.mod h1:w2P8uVp06p2iyKKuvXIm7N/y0UCRt3UfJTfZ7oOpglM= +golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ= +golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= +golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= golang.org/x/text v0.34.0 h1:oL/Qq0Kdaqxa1KbNeMKwQq0reLCCaFtqu2eNuSeNHbk= golang.org/x/text v0.34.0/go.mod h1:homfLqTYRFyVYemLBFl5GgL/DWEiH5wcsQ5gSh1yziA= +golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= +golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200117161641-43d50277825c/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200122220014-bf1340f18c4a/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200204074204-1cc6d1ef6c74/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200207183749-b753a1ba74fa/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200212150539-ea181f53ac56/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200224181240-023911ca70b2/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200227222343-706bc42d1f0d/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200304193943-95d2e580d8eb/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= +golang.org/x/tools v0.0.0-20200312045724-11d5b4c81c7d/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= +golang.org/x/tools v0.0.0-20200329025819-fd4102a86c65/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= +golang.org/x/tools v0.0.0-20200331025713-a30bf2db82d4/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= +golang.org/x/tools v0.0.0-20200501065659-ab2804fb9c9d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200515010526-7d3b6ebf133d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200724022722-7017fd6b1305/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20201224043029-2b0845dc783e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.1.1-0.20210205202024-ef80cdb6ec6d/go.mod h1:9bzcO0MWcOuT0tm1iBGzDVPshzfwoVvREIui8C+MHqU= +golang.org/x/tools v0.1.1-0.20210302220138-2ac05c832e1a/go.mod h1:9bzcO0MWcOuT0tm1iBGzDVPshzfwoVvREIui8C+MHqU= +golang.org/x/tools v0.1.1/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/tools v0.1.10/go.mod h1:Uh6Zz+xoGYZom868N8YTex3t7RhtHDBrE8Gzo9bV56E= golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= +golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= +golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58= +golang.org/x/tools v0.14.0/go.mod h1:uYBEerGOWcJyEORxN+Ek8+TT266gXkNlHdJBwexUsBg= golang.org/x/tools v0.42.0 h1:uNgphsn75Tdz5Ji2q36v/nsFSfR/9BRFvqhGBaJGd5k= golang.org/x/tools v0.42.0/go.mod h1:Ma6lCIwGZvHK6XtgbswSoWroEkhugApmsXyrUmBhfr0= +golang.org/x/tools/go/expect v0.1.1-deprecated h1:jpBZDwmgPhXsKZC6WhL20P4b/wmnpsEAGHaNy0n/rJM= +golang.org/x/tools/go/expect v0.1.1-deprecated/go.mod h1:eihoPOH+FgIqa3FpoTwguz/bVUSGBlGQU67vpBeOrBY= +golang.org/x/tools/go/packages/packagestest v0.1.1-deprecated h1:1h2MnaIAIXISqTFKdENegdpAgUXz6NrPEsbIeWaBRvM= +golang.org/x/tools/go/packages/packagestest v0.1.1-deprecated/go.mod h1:RVAQXBGNv1ib0J382/DPCRS/BPnsGebyM1Gj5VSDpG8= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= gonum.org/v1/gonum v0.16.0 h1:5+ul4Swaf3ESvrOnidPp4GZbzf0mxVQpDCYUQE7OJfk= gonum.org/v1/gonum v0.16.0/go.mod h1:fef3am4MQ93R2HHpKnLk4/Tbh/s0+wqD5nfa6Pnwy4E= +google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= +google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= +google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= +google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= +google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.15.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.17.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.18.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.19.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.20.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.22.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.24.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= +google.golang.org/api v0.28.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= +google.golang.org/api v0.29.0/go.mod h1:Lcubydp8VUV7KeIHD9z2Bys/sm/vGKnG1UHuDBSrHWM= +google.golang.org/api v0.30.0/go.mod h1:QGmEvQ87FHZNiUVJkT14jQNYJ4ZJjdRF23ZXz5138Fc= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= +google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0= +google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= google.golang.org/appengine v1.6.8 h1:IhEN5q69dyKagZPYMSdIjS2HqprW324FRQZJcGqPAsM= google.golang.org/appengine v1.6.8/go.mod h1:1jJ3jBArFh5pcgW8gCtRJnepW8FzD1V44FJffLiz/Ds= +google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= +google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8= +google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191115194625-c23dd37a84c9/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191216164720-4f79533eabd1/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191230161307-f3c370f40bfb/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200115191322-ca5a22157cba/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200122232147-0452cf42e150/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200204135345-fa8e72b47b90/go.mod h1:GmwEX6Z4W5gMy59cAlVYjN9JhxgbQH6Gn+gFDQe2lzA= +google.golang.org/genproto v0.0.0-20200212174721-66ed5ce911ce/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200224152610-e50cd9704f63/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200228133532-8c2c7df3a383/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200305110556-506484158171/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200312145019-da6875a35672/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200331122359-1ee6d9798940/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200430143042-b979b6f78d84/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200511104702-f5ebc3bea380/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U= +google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= +google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA= +google.golang.org/genproto v0.0.0-20200729003335-053ba62fc06f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200804131852-c06518451d9c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto/googleapis/rpc v0.0.0-20260209200024-4cfbd4190f57 h1:mWPCjDEyshlQYzBpMNHaEof6UX1PmHcaUODUywQ0uac= google.golang.org/genproto/googleapis/rpc v0.0.0-20260209200024-4cfbd4190f57/go.mod h1:j9x/tPzZkyxcgEFkiKEEGxfvyumM01BEtsW8xzOahRQ= +google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= +google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= +google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= +google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= +google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= +google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKal+60= +google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= +google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= google.golang.org/grpc v1.79.1 h1:zGhSi45ODB9/p3VAawt9a+O/MULLl9dpizzNNpq7flY= google.golang.org/grpc v1.79.1/go.mod h1:KmT0Kjez+0dde/v2j9vzwoAScgEPx/Bw1CYChhHLrHQ= +google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= +google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= +google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= +google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE= +google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo= +google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4= +google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= google.golang.org/protobuf v1.36.11 h1:fV6ZwhNocDyBLK0dj+fg8ektcVegBBuEolpbTQyBNVE= google.golang.org/protobuf v1.36.11/go.mod h1:HTf+CrKn2C3g5S8VImy6tdcUvCska2kB7j23XfzDpco= +gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= +gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= +gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= +gopkg.in/ini.v1 v1.67.0 h1:Dgnx+6+nfE+IfzjUEISNeydPJh9AXNNsWbGP9KzCsOA= +gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= +gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ= +gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw= gopkg.in/warnings.v0 v0.1.2 h1:wFXVbFY8DY5/xOe1ECiWdKCzZlxgshcYVNkBHstARME= gopkg.in/warnings.v0 v0.1.2/go.mod h1:jksf8JmL6Qr/oQM2OXTHunEvvTAsrWBLb6OOjuVWRNI= +gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.5/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= +gopkg.in/yaml.v3 v3.0.0-20191026110619-0b21df46bc1d/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= +honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= +honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= +honnef.co/go/tools v0.7.0 h1:w6WUp1VbkqPEgLz4rkBzH/CSU6HkoqNLp6GstyTx3lU= +honnef.co/go/tools v0.7.0/go.mod h1:pm29oPxeP3P82ISxZDgIYeOaf9ta6Pi0EWvCFoLG2vc= +mvdan.cc/gofumpt v0.9.2 h1:zsEMWL8SVKGHNztrx6uZrXdp7AX8r421Vvp23sz7ik4= +mvdan.cc/gofumpt v0.9.2/go.mod h1:iB7Hn+ai8lPvofHd9ZFGVg2GOr8sBUw1QUWjNbmIL/s= +mvdan.cc/unparam v0.0.0-20251027182757-5beb8c8f8f15 h1:ssMzja7PDPJV8FStj7hq9IKiuiKhgz9ErWw+m68e7DI= +mvdan.cc/unparam v0.0.0-20251027182757-5beb8c8f8f15/go.mod h1:4M5MMXl2kW6fivUT6yRGpLLPNfuGtU2Z0cPvFquGDYU= +rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= +rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= +rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= diff --git a/golang-ci.yaml b/golang-ci.yaml index 42d943f2..11f74066 100644 --- a/golang-ci.yaml +++ b/golang-ci.yaml @@ -2,6 +2,13 @@ version: "2" run: concurrency: 4 +output: + formats: + text: + print-linter-name: true + print-issued-lines: true + colors: true + path: stdout linters: enable: - bodyclose @@ -68,6 +75,10 @@ linters: - name: empty-lines - name: early-return exclusions: + paths: + - stackit-sdk-generator/ + - generated/ + - pkg_gen/ generated: lax warn-unused: true # Excluding configuration per-path, per-linter, per-text and per-source. @@ -76,14 +87,6 @@ linters: - path: _test\.go linters: - gochecknoinits - paths: - - third_party/ - - builtin/ - - examples/ - - tools/copy.go - - tools/main.go - - pkg_gen/ - - cmd/ formatters: enable: - gofmt @@ -91,12 +94,4 @@ formatters: settings: goimports: local-prefixes: - - tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview - exclusions: - generated: lax - paths: - - third_party/ - - builtin/ - - examples/ - - pkg_gen/ - - cmd/ + - tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview \ No newline at end of file diff --git a/internal/testutils/activateMocks.go b/internal/testutils/activateMocks.go index ddeaff83..c8f7dd05 100644 --- a/internal/testutils/activateMocks.go +++ b/internal/testutils/activateMocks.go @@ -20,13 +20,20 @@ func TestName() string { } func ActivateEnvironmentHttpMocks() { - httpmock.RegisterNoResponder(func(req *http.Request) (*http.Response, error) { - return nil, fmt.Errorf("no responder found for %s %s, please check your http mocks", req.Method, req.URL) - }) - - httpmock.RegisterRegexpResponder("GET", regexp.MustCompile(`^https://api\.bap\.microsoft\.com/providers/Microsoft\.BusinessAppPlatform/locations/(europe|unitedstates)/environmentLanguages\?api-version=2023-06-01$`), + httpmock.RegisterNoResponder( func(req *http.Request) (*http.Response, error) { - return httpmock.NewStringResponse(http.StatusOK, httpmock.File("../../services/languages/tests/datasource/Validate_Read/get_languages.json").String()), nil - }) + return nil, fmt.Errorf("no responder found for %s %s, please check your http mocks", req.Method, req.URL) + }, + ) + httpmock.RegisterRegexpResponder( + "GET", + regexp.MustCompile(`^https://api\.bap\.microsoft\.com/providers/Microsoft\.BusinessAppPlatform/locations/(europe|unitedstates)/environmentLanguages\?api-version=2023-06-01$`), + func(_ *http.Request) (*http.Response, error) { + return httpmock.NewStringResponse( + http.StatusOK, + httpmock.File("../../services/languages/tests/datasource/Validate_Read/get_languages.json").String(), + ), nil + }, + ) } diff --git a/scripts/lint-golangci-lint.sh b/scripts/lint-golangci-lint.sh deleted file mode 100755 index 0a883589..00000000 --- a/scripts/lint-golangci-lint.sh +++ /dev/null @@ -1,19 +0,0 @@ -#!/usr/bin/env bash - -# This script lints the SDK modules and the internal examples -# Pre-requisites: golangci-lint -set -eo pipefail - -ROOT_DIR=$(git rev-parse --show-toplevel) -GOLANG_CI_YAML_PATH="${ROOT_DIR}/golang-ci.yaml" -GOLANG_CI_ARGS="--allow-parallel-runners --timeout=5m --config=${GOLANG_CI_YAML_PATH}" - -if type -p golangci-lint >/dev/null; then - : -else - echo "golangci-lint not installed, unable to proceed." - exit 1 -fi - -cd ${ROOT_DIR} -golangci-lint run ${GOLANG_CI_ARGS} diff --git a/scripts/project.sh b/scripts/project.sh index 1d570c6a..68585774 100755 --- a/scripts/project.sh +++ b/scripts/project.sh @@ -17,11 +17,7 @@ elif [ "$action" = "tools" ]; then go mod download - # go install github.com/golangci/golangci-lint/cmd/golangci-lint@v1.62.0 - go install github.com/golangci/golangci-lint/v2/cmd/golangci-lint@v2.7.2 - - # go install github.com/hashicorp/terraform-plugin-docs/cmd/tfplugindocs@v0.21.0 - go install github.com/hashicorp/terraform-plugin-docs/cmd/tfplugindocs@v0.24.0 + go install golang.org/x/tools/cmd/goimports@v0.42.0 else echo "Invalid action: '$action', please use $0 help for help" fi diff --git a/scripts/tfplugindocs.sh b/scripts/tfplugindocs.sh index 6f9d5d1b..e77b6a98 100755 --- a/scripts/tfplugindocs.sh +++ b/scripts/tfplugindocs.sh @@ -14,5 +14,5 @@ fi mkdir -p ${ROOT_DIR}/docs echo ">> Generating documentation" -tfplugindocs generate \ +go run github.com/hashicorp/terraform-plugin-docs/cmd/tfplugindocs generate \ --provider-name "stackitprivatepreview" diff --git a/stackit/internal/core/core.go b/stackit/internal/core/core.go index d3ea252c..3680ae65 100644 --- a/stackit/internal/core/core.go +++ b/stackit/internal/core/core.go @@ -32,7 +32,7 @@ const ( type EphemeralProviderData struct { ProviderData - PrivateKey string + PrivateKey string //nolint:gosec //this is a placeholder and not used in this code PrivateKeyPath string ServiceAccountKey string ServiceAccountKeyPath string @@ -105,11 +105,13 @@ func DiagsToError(diags diag.Diagnostics) error { diagsError := diags.Errors() diagsStrings := make([]string, 0) for _, diagnostic := range diagsError { - diagsStrings = append(diagsStrings, fmt.Sprintf( - "(%s) %s", - diagnostic.Summary(), - diagnostic.Detail(), - )) + diagsStrings = append( + diagsStrings, fmt.Sprintf( + "(%s) %s", + diagnostic.Summary(), + diagnostic.Detail(), + ), + ) } return fmt.Errorf("%s", strings.Join(diagsStrings, ";")) } @@ -136,14 +138,22 @@ func LogAndAddWarning(ctx context.Context, diags *diag.Diagnostics, summary, det func LogAndAddWarningBeta(ctx context.Context, diags *diag.Diagnostics, name string, resourceType ResourceType) { warnTitle := fmt.Sprintf("The %s %q is in beta", resourceType, name) - warnContent := fmt.Sprintf("The %s %q is in beta and may be subject to breaking changes in the future. Use with caution.", resourceType, name) + warnContent := fmt.Sprintf( + "The %s %q is in beta and may be subject to breaking changes in the future. Use with caution.", + resourceType, + name, + ) tflog.Warn(ctx, fmt.Sprintf("%s | %s", warnTitle, warnContent)) diags.AddWarning(warnTitle, warnContent) } func LogAndAddErrorBeta(ctx context.Context, diags *diag.Diagnostics, name string, resourceType ResourceType) { errTitle := fmt.Sprintf("The %s %q is in beta and beta is not enabled", resourceType, name) - errContent := fmt.Sprintf(`The %s %q is in beta and the beta functionality is currently not enabled. To enable it, set the environment variable STACKIT_TF_ENABLE_BETA_RESOURCES to "true" or set the "enable_beta_resources" provider field to true.`, resourceType, name) + errContent := fmt.Sprintf( + `The %s %q is in beta and the beta functionality is currently not enabled. To enable it, set the environment variable STACKIT_TF_ENABLE_BETA_RESOURCES to "true" or set the "enable_beta_resources" provider field to true.`, + resourceType, + name, + ) tflog.Error(ctx, fmt.Sprintf("%s | %s", errTitle, errContent)) diags.AddError(errTitle, errContent) } @@ -161,8 +171,10 @@ func LogResponse(ctx context.Context) context.Context { traceId := runtime.GetTraceId(ctx) ctx = tflog.SetField(ctx, "x-trace-id", traceId) - tflog.Info(ctx, "response data", map[string]interface{}{ - "x-trace-id": traceId, - }) + tflog.Info( + ctx, "response data", map[string]interface{}{ + "x-trace-id": traceId, + }, + ) return ctx } diff --git a/stackit/internal/core/retry_round_tripper.go b/stackit/internal/core/retry_round_tripper.go index 945a3061..568be431 100644 --- a/stackit/internal/core/retry_round_tripper.go +++ b/stackit/internal/core/retry_round_tripper.go @@ -98,7 +98,7 @@ func (rrt *RetryRoundTripper) retryLoop( waitDuration := rrt.calculateWaitDurationWithJitter(ctx, currentDelay) if err := rrt.waitForDelay(ctx, waitDuration); err != nil { - return nil, err // Context was cancelled during wait. + return nil, err // Context was canceled during wait. } // Exponential backoff for the next potential retry. @@ -153,7 +153,6 @@ func (rrt *RetryRoundTripper) handleFinalError( ) error { if resp != nil { if err := resp.Body.Close(); err != nil { - tflog.Warn( ctx, "Failed to close response body", map[string]interface{}{ "error": err.Error(), @@ -194,7 +193,6 @@ func (rrt *RetryRoundTripper) shouldRetry(resp *http.Response, err error) bool { } return false - } // calculateWaitDurationWithJitter calculates the backoff duration for the next retry, @@ -232,7 +230,7 @@ func (rrt *RetryRoundTripper) calculateWaitDurationWithJitter( func (rrt *RetryRoundTripper) waitForDelay(ctx context.Context, delay time.Duration) error { select { case <-ctx.Done(): - return fmt.Errorf("context cancelled during backoff wait: %w", ctx.Err()) + return fmt.Errorf("context canceled during backoff wait: %w", ctx.Err()) case <-time.After(delay): return nil } diff --git a/stackit/internal/core/retry_round_tripper_test.go b/stackit/internal/core/retry_round_tripper_test.go index e19c553c..ac84db8b 100644 --- a/stackit/internal/core/retry_round_tripper_test.go +++ b/stackit/internal/core/retry_round_tripper_test.go @@ -72,7 +72,7 @@ func TestRetryRoundTripper_RoundTrip(t *testing.T) { }, } tripper := testRetryConfig(mock) - req := httptest.NewRequest(http.MethodGet, "/", nil) + req := httptest.NewRequest(http.MethodGet, "/", http.NoBody) resp, err := tripper.RoundTrip(req) if resp != nil { @@ -110,7 +110,7 @@ func TestRetryRoundTripper_RoundTrip(t *testing.T) { }, } tripper := testRetryConfig(mock) - req := httptest.NewRequest(http.MethodGet, "/", nil) + req := httptest.NewRequest(http.MethodGet, "/", http.NoBody) resp, err := tripper.RoundTrip(req) if resp != nil { @@ -155,7 +155,7 @@ func TestRetryRoundTripper_RoundTrip(t *testing.T) { }, nil } tripper := testRetryConfig(mock) - req := httptest.NewRequest(http.MethodGet, "/", nil) + req := httptest.NewRequest(http.MethodGet, "/", http.NoBody) resp, err := tripper.RoundTrip(req) if resp != nil { @@ -185,12 +185,12 @@ func TestRetryRoundTripper_RoundTrip(t *testing.T) { mockErr := errors.New("simulated network error") mock := &mockRoundTripper{ - roundTripFunc: func(req *http.Request) (*http.Response, error) { + roundTripFunc: func(_ *http.Request) (*http.Response, error) { return nil, mockErr }, } tripper := testRetryConfig(mock) - req := httptest.NewRequest(http.MethodGet, "/", nil) + req := httptest.NewRequest(http.MethodGet, "/", http.NoBody) resp, err := tripper.RoundTrip(req) if resp != nil { @@ -211,7 +211,7 @@ func TestRetryRoundTripper_RoundTrip(t *testing.T) { ) t.Run( - "should abort retries if the main context is cancelled", func(t *testing.T) { + "should abort retries if the main context is canceled", func(t *testing.T) { t.Parallel() mock := &mockRoundTripper{ @@ -230,7 +230,7 @@ func TestRetryRoundTripper_RoundTrip(t *testing.T) { ctx, cancel := context.WithTimeout(baseCtx, 20*time.Millisecond) defer cancel() - req := httptest.NewRequest(http.MethodGet, "/", nil).WithContext(ctx) + req := httptest.NewRequest(http.MethodGet, "/", http.NoBody).WithContext(ctx) resp, err := tripper.RoundTrip(req) if resp != nil { diff --git a/stackit/internal/services/postgresflexalpha/instance/datasources_gen/instance_data_source_gen.go b/stackit/internal/services/postgresflexalpha/instance/datasources_gen/instance_data_source_gen.go index 047d0176..844d494e 100644 --- a/stackit/internal/services/postgresflexalpha/instance/datasources_gen/instance_data_source_gen.go +++ b/stackit/internal/services/postgresflexalpha/instance/datasources_gen/instance_data_source_gen.go @@ -33,15 +33,27 @@ func InstanceDataSourceSchema(ctx context.Context) schema.Schema { }, "connection_info": schema.SingleNestedAttribute{ Attributes: map[string]schema.Attribute{ - "host": schema.StringAttribute{ + "write": schema.SingleNestedAttribute{ + Attributes: map[string]schema.Attribute{ + "host": schema.StringAttribute{ + Computed: true, + Description: "The host of the instance.", + MarkdownDescription: "The host of the instance.", + }, + "port": schema.Int64Attribute{ + Computed: true, + Description: "The port of the instance.", + MarkdownDescription: "The port of the instance.", + }, + }, + CustomType: WriteType{ + ObjectType: types.ObjectType{ + AttrTypes: WriteValue{}.AttributeTypes(ctx), + }, + }, Computed: true, - Description: "The host of the instance.", - MarkdownDescription: "The host of the instance.", - }, - "port": schema.Int64Attribute{ - Computed: true, - Description: "The port of the instance.", - MarkdownDescription: "The port of the instance.", + Description: "The DNS name and port in the instance overview", + MarkdownDescription: "The DNS name and port in the instance overview", }, }, CustomType: ConnectionInfoType{ @@ -50,8 +62,8 @@ func InstanceDataSourceSchema(ctx context.Context) schema.Schema { }, }, Computed: true, - Description: "The DNS name and port in the instance overview", - MarkdownDescription: "The DNS name and port in the instance overview", + Description: "The connection information of the instance", + MarkdownDescription: "The connection information of the instance", }, "encryption": schema.SingleNestedAttribute{ Attributes: map[string]schema.Attribute{ @@ -243,40 +255,22 @@ func (t ConnectionInfoType) ValueFromObject(ctx context.Context, in basetypes.Ob attributes := in.Attributes() - hostAttribute, ok := attributes["host"] + writeAttribute, ok := attributes["write"] if !ok { diags.AddError( "Attribute Missing", - `host is missing from object`) + `write is missing from object`) return nil, diags } - hostVal, ok := hostAttribute.(basetypes.StringValue) + writeVal, ok := writeAttribute.(basetypes.ObjectValue) if !ok { diags.AddError( "Attribute Wrong Type", - fmt.Sprintf(`host expected to be basetypes.StringValue, was: %T`, hostAttribute)) - } - - portAttribute, ok := attributes["port"] - - if !ok { - diags.AddError( - "Attribute Missing", - `port is missing from object`) - - return nil, diags - } - - portVal, ok := portAttribute.(basetypes.Int64Value) - - if !ok { - diags.AddError( - "Attribute Wrong Type", - fmt.Sprintf(`port expected to be basetypes.Int64Value, was: %T`, portAttribute)) + fmt.Sprintf(`write expected to be basetypes.ObjectValue, was: %T`, writeAttribute)) } if diags.HasError() { @@ -284,8 +278,7 @@ func (t ConnectionInfoType) ValueFromObject(ctx context.Context, in basetypes.Ob } return ConnectionInfoValue{ - Host: hostVal, - Port: portVal, + Write: writeVal, state: attr.ValueStateKnown, }, diags } @@ -353,40 +346,22 @@ func NewConnectionInfoValue(attributeTypes map[string]attr.Type, attributes map[ return NewConnectionInfoValueUnknown(), diags } - hostAttribute, ok := attributes["host"] + writeAttribute, ok := attributes["write"] if !ok { diags.AddError( "Attribute Missing", - `host is missing from object`) + `write is missing from object`) return NewConnectionInfoValueUnknown(), diags } - hostVal, ok := hostAttribute.(basetypes.StringValue) + writeVal, ok := writeAttribute.(basetypes.ObjectValue) if !ok { diags.AddError( "Attribute Wrong Type", - fmt.Sprintf(`host expected to be basetypes.StringValue, was: %T`, hostAttribute)) - } - - portAttribute, ok := attributes["port"] - - if !ok { - diags.AddError( - "Attribute Missing", - `port is missing from object`) - - return NewConnectionInfoValueUnknown(), diags - } - - portVal, ok := portAttribute.(basetypes.Int64Value) - - if !ok { - diags.AddError( - "Attribute Wrong Type", - fmt.Sprintf(`port expected to be basetypes.Int64Value, was: %T`, portAttribute)) + fmt.Sprintf(`write expected to be basetypes.ObjectValue, was: %T`, writeAttribute)) } if diags.HasError() { @@ -394,8 +369,7 @@ func NewConnectionInfoValue(attributeTypes map[string]attr.Type, attributes map[ } return ConnectionInfoValue{ - Host: hostVal, - Port: portVal, + Write: writeVal, state: attr.ValueStateKnown, }, diags } @@ -468,12 +442,401 @@ func (t ConnectionInfoType) ValueType(ctx context.Context) attr.Value { var _ basetypes.ObjectValuable = ConnectionInfoValue{} type ConnectionInfoValue struct { + Write basetypes.ObjectValue `tfsdk:"write"` + state attr.ValueState +} + +func (v ConnectionInfoValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) { + attrTypes := make(map[string]tftypes.Type, 1) + + var val tftypes.Value + var err error + + attrTypes["write"] = basetypes.ObjectType{ + AttrTypes: WriteValue{}.AttributeTypes(ctx), + }.TerraformType(ctx) + + objectType := tftypes.Object{AttributeTypes: attrTypes} + + switch v.state { + case attr.ValueStateKnown: + vals := make(map[string]tftypes.Value, 1) + + val, err = v.Write.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["write"] = val + + if err := tftypes.ValidateValue(objectType, vals); err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + return tftypes.NewValue(objectType, vals), nil + case attr.ValueStateNull: + return tftypes.NewValue(objectType, nil), nil + case attr.ValueStateUnknown: + return tftypes.NewValue(objectType, tftypes.UnknownValue), nil + default: + panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state)) + } +} + +func (v ConnectionInfoValue) IsNull() bool { + return v.state == attr.ValueStateNull +} + +func (v ConnectionInfoValue) IsUnknown() bool { + return v.state == attr.ValueStateUnknown +} + +func (v ConnectionInfoValue) String() string { + return "ConnectionInfoValue" +} + +func (v ConnectionInfoValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) { + var diags diag.Diagnostics + + var write basetypes.ObjectValue + + if v.Write.IsNull() { + write = types.ObjectNull( + WriteValue{}.AttributeTypes(ctx), + ) + } + + if v.Write.IsUnknown() { + write = types.ObjectUnknown( + WriteValue{}.AttributeTypes(ctx), + ) + } + + if !v.Write.IsNull() && !v.Write.IsUnknown() { + write = types.ObjectValueMust( + WriteValue{}.AttributeTypes(ctx), + v.Write.Attributes(), + ) + } + + attributeTypes := map[string]attr.Type{ + "write": basetypes.ObjectType{ + AttrTypes: WriteValue{}.AttributeTypes(ctx), + }, + } + + if v.IsNull() { + return types.ObjectNull(attributeTypes), diags + } + + if v.IsUnknown() { + return types.ObjectUnknown(attributeTypes), diags + } + + objVal, diags := types.ObjectValue( + attributeTypes, + map[string]attr.Value{ + "write": write, + }) + + return objVal, diags +} + +func (v ConnectionInfoValue) Equal(o attr.Value) bool { + other, ok := o.(ConnectionInfoValue) + + if !ok { + return false + } + + if v.state != other.state { + return false + } + + if v.state != attr.ValueStateKnown { + return true + } + + if !v.Write.Equal(other.Write) { + return false + } + + return true +} + +func (v ConnectionInfoValue) Type(ctx context.Context) attr.Type { + return ConnectionInfoType{ + basetypes.ObjectType{ + AttrTypes: v.AttributeTypes(ctx), + }, + } +} + +func (v ConnectionInfoValue) AttributeTypes(ctx context.Context) map[string]attr.Type { + return map[string]attr.Type{ + "write": basetypes.ObjectType{ + AttrTypes: WriteValue{}.AttributeTypes(ctx), + }, + } +} + +var _ basetypes.ObjectTypable = WriteType{} + +type WriteType struct { + basetypes.ObjectType +} + +func (t WriteType) Equal(o attr.Type) bool { + other, ok := o.(WriteType) + + if !ok { + return false + } + + return t.ObjectType.Equal(other.ObjectType) +} + +func (t WriteType) String() string { + return "WriteType" +} + +func (t WriteType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) { + var diags diag.Diagnostics + + attributes := in.Attributes() + + hostAttribute, ok := attributes["host"] + + if !ok { + diags.AddError( + "Attribute Missing", + `host is missing from object`) + + return nil, diags + } + + hostVal, ok := hostAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`host expected to be basetypes.StringValue, was: %T`, hostAttribute)) + } + + portAttribute, ok := attributes["port"] + + if !ok { + diags.AddError( + "Attribute Missing", + `port is missing from object`) + + return nil, diags + } + + portVal, ok := portAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`port expected to be basetypes.Int64Value, was: %T`, portAttribute)) + } + + if diags.HasError() { + return nil, diags + } + + return WriteValue{ + Host: hostVal, + Port: portVal, + state: attr.ValueStateKnown, + }, diags +} + +func NewWriteValueNull() WriteValue { + return WriteValue{ + state: attr.ValueStateNull, + } +} + +func NewWriteValueUnknown() WriteValue { + return WriteValue{ + state: attr.ValueStateUnknown, + } +} + +func NewWriteValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (WriteValue, diag.Diagnostics) { + var diags diag.Diagnostics + + // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521 + ctx := context.Background() + + for name, attributeType := range attributeTypes { + attribute, ok := attributes[name] + + if !ok { + diags.AddError( + "Missing WriteValue Attribute Value", + "While creating a WriteValue value, a missing attribute value was detected. "+ + "A WriteValue must contain values for all attributes, even if null or unknown. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("WriteValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()), + ) + + continue + } + + if !attributeType.Equal(attribute.Type(ctx)) { + diags.AddError( + "Invalid WriteValue Attribute Type", + "While creating a WriteValue value, an invalid attribute value was detected. "+ + "A WriteValue must use a matching attribute type for the value. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("WriteValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+ + fmt.Sprintf("WriteValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)), + ) + } + } + + for name := range attributes { + _, ok := attributeTypes[name] + + if !ok { + diags.AddError( + "Extra WriteValue Attribute Value", + "While creating a WriteValue value, an extra attribute value was detected. "+ + "A WriteValue must not contain values beyond the expected attribute types. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("Extra WriteValue Attribute Name: %s", name), + ) + } + } + + if diags.HasError() { + return NewWriteValueUnknown(), diags + } + + hostAttribute, ok := attributes["host"] + + if !ok { + diags.AddError( + "Attribute Missing", + `host is missing from object`) + + return NewWriteValueUnknown(), diags + } + + hostVal, ok := hostAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`host expected to be basetypes.StringValue, was: %T`, hostAttribute)) + } + + portAttribute, ok := attributes["port"] + + if !ok { + diags.AddError( + "Attribute Missing", + `port is missing from object`) + + return NewWriteValueUnknown(), diags + } + + portVal, ok := portAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`port expected to be basetypes.Int64Value, was: %T`, portAttribute)) + } + + if diags.HasError() { + return NewWriteValueUnknown(), diags + } + + return WriteValue{ + Host: hostVal, + Port: portVal, + state: attr.ValueStateKnown, + }, diags +} + +func NewWriteValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) WriteValue { + object, diags := NewWriteValue(attributeTypes, attributes) + + if diags.HasError() { + // This could potentially be added to the diag package. + diagsStrings := make([]string, 0, len(diags)) + + for _, diagnostic := range diags { + diagsStrings = append(diagsStrings, fmt.Sprintf( + "%s | %s | %s", + diagnostic.Severity(), + diagnostic.Summary(), + diagnostic.Detail())) + } + + panic("NewWriteValueMust received error(s): " + strings.Join(diagsStrings, "\n")) + } + + return object +} + +func (t WriteType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) { + if in.Type() == nil { + return NewWriteValueNull(), nil + } + + if !in.Type().Equal(t.TerraformType(ctx)) { + return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type()) + } + + if !in.IsKnown() { + return NewWriteValueUnknown(), nil + } + + if in.IsNull() { + return NewWriteValueNull(), nil + } + + attributes := map[string]attr.Value{} + + val := map[string]tftypes.Value{} + + err := in.As(&val) + + if err != nil { + return nil, err + } + + for k, v := range val { + a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v) + + if err != nil { + return nil, err + } + + attributes[k] = a + } + + return NewWriteValueMust(WriteValue{}.AttributeTypes(ctx), attributes), nil +} + +func (t WriteType) ValueType(ctx context.Context) attr.Value { + return WriteValue{} +} + +var _ basetypes.ObjectValuable = WriteValue{} + +type WriteValue struct { Host basetypes.StringValue `tfsdk:"host"` Port basetypes.Int64Value `tfsdk:"port"` state attr.ValueState } -func (v ConnectionInfoValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) { +func (v WriteValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) { attrTypes := make(map[string]tftypes.Type, 2) var val tftypes.Value @@ -518,19 +881,19 @@ func (v ConnectionInfoValue) ToTerraformValue(ctx context.Context) (tftypes.Valu } } -func (v ConnectionInfoValue) IsNull() bool { +func (v WriteValue) IsNull() bool { return v.state == attr.ValueStateNull } -func (v ConnectionInfoValue) IsUnknown() bool { +func (v WriteValue) IsUnknown() bool { return v.state == attr.ValueStateUnknown } -func (v ConnectionInfoValue) String() string { - return "ConnectionInfoValue" +func (v WriteValue) String() string { + return "WriteValue" } -func (v ConnectionInfoValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) { +func (v WriteValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) { var diags diag.Diagnostics attributeTypes := map[string]attr.Type{ @@ -556,8 +919,8 @@ func (v ConnectionInfoValue) ToObjectValue(ctx context.Context) (basetypes.Objec return objVal, diags } -func (v ConnectionInfoValue) Equal(o attr.Value) bool { - other, ok := o.(ConnectionInfoValue) +func (v WriteValue) Equal(o attr.Value) bool { + other, ok := o.(WriteValue) if !ok { return false @@ -582,15 +945,15 @@ func (v ConnectionInfoValue) Equal(o attr.Value) bool { return true } -func (v ConnectionInfoValue) Type(ctx context.Context) attr.Type { - return ConnectionInfoType{ +func (v WriteValue) Type(ctx context.Context) attr.Type { + return WriteType{ basetypes.ObjectType{ AttrTypes: v.AttributeTypes(ctx), }, } } -func (v ConnectionInfoValue) AttributeTypes(ctx context.Context) map[string]attr.Type { +func (v WriteValue) AttributeTypes(ctx context.Context) map[string]attr.Type { return map[string]attr.Type{ "host": basetypes.StringType{}, "port": basetypes.Int64Type{}, diff --git a/stackit/internal/services/postgresflexalpha/instance/functions.go b/stackit/internal/services/postgresflexalpha/instance/functions.go index 133be314..e9c9c9c2 100644 --- a/stackit/internal/services/postgresflexalpha/instance/functions.go +++ b/stackit/internal/services/postgresflexalpha/instance/functions.go @@ -33,9 +33,9 @@ func mapGetInstanceResponseToModel( ) } - isConnectionInfoIncomplete := resp.ConnectionInfo == nil || - resp.ConnectionInfo.Host == nil || *resp.ConnectionInfo.Host == "" || - resp.ConnectionInfo.Port == nil || *resp.ConnectionInfo.Port == 0 + isConnectionInfoIncomplete := resp.ConnectionInfo == nil || resp.ConnectionInfo.Write == nil || + resp.ConnectionInfo.Write.Host == nil || *resp.ConnectionInfo.Write.Host == "" || + resp.ConnectionInfo.Write.Port == nil || *resp.ConnectionInfo.Write.Port == 0 if isConnectionInfoIncomplete { m.ConnectionInfo = postgresflexalpharesource.NewConnectionInfoValueNull() @@ -43,22 +43,17 @@ func mapGetInstanceResponseToModel( m.ConnectionInfo = postgresflexalpharesource.NewConnectionInfoValueMust( postgresflexalpharesource.ConnectionInfoValue{}.AttributeTypes(ctx), map[string]attr.Value{ - "host": types.StringPointerValue(resp.ConnectionInfo.Host), - "port": types.Int64PointerValue(resp.ConnectionInfo.Port), + "write": postgresflexalpharesource.NewWriteValueMust( + postgresflexalpharesource.WriteValue{}.AttributeTypes(ctx), + map[string]attr.Value{ + "host": types.StringPointerValue(resp.ConnectionInfo.Write.Host), + "port": types.Int64PointerValue(resp.ConnectionInfo.Write.Port), + }, + ), }, ) } - m.ConnectionInfo.Host = types.StringValue("") - if host, ok := resp.ConnectionInfo.GetHostOk(); ok { - m.ConnectionInfo.Host = types.StringValue(host) - } - - m.ConnectionInfo.Port = types.Int64Value(0) - if port, ok := resp.ConnectionInfo.GetPortOk(); ok { - m.ConnectionInfo.Port = types.Int64Value(port) - } - m.FlavorId = types.StringValue(resp.GetFlavorId()) if m.Id.IsNull() || m.Id.IsUnknown() { m.Id = utils.BuildInternalTerraformId( @@ -164,9 +159,9 @@ func mapGetDataInstanceResponseToModel( } func handleConnectionInfo(ctx context.Context, m *dataSourceModel, resp *postgresflex.GetInstanceResponse) { - isConnectionInfoIncomplete := resp.ConnectionInfo == nil || - resp.ConnectionInfo.Host == nil || *resp.ConnectionInfo.Host == "" || - resp.ConnectionInfo.Port == nil || *resp.ConnectionInfo.Port == 0 + isConnectionInfoIncomplete := resp.ConnectionInfo == nil || resp.ConnectionInfo.Write == nil || + resp.ConnectionInfo.Write.Host == nil || *resp.ConnectionInfo.Write.Host == "" || + resp.ConnectionInfo.Write.Port == nil || *resp.ConnectionInfo.Write.Port == 0 if isConnectionInfoIncomplete { m.ConnectionInfo = postgresflexalphadatasource.NewConnectionInfoValueNull() @@ -174,8 +169,13 @@ func handleConnectionInfo(ctx context.Context, m *dataSourceModel, resp *postgre m.ConnectionInfo = postgresflexalphadatasource.NewConnectionInfoValueMust( postgresflexalphadatasource.ConnectionInfoValue{}.AttributeTypes(ctx), map[string]attr.Value{ - "host": types.StringPointerValue(resp.ConnectionInfo.Host), - "port": types.Int64PointerValue(resp.ConnectionInfo.Port), + "write": postgresflexalphadatasource.NewWriteValueMust( + postgresflexalphadatasource.WriteValue{}.AttributeTypes(ctx), + map[string]attr.Value{ + "host": types.StringPointerValue(resp.ConnectionInfo.Write.Host), + "port": types.Int64PointerValue(resp.ConnectionInfo.Write.Port), + }, + ), }, ) } diff --git a/stackit/internal/services/postgresflexalpha/instance/resource.go b/stackit/internal/services/postgresflexalpha/instance/resource.go index 74661cd9..d4049fb5 100644 --- a/stackit/internal/services/postgresflexalpha/instance/resource.go +++ b/stackit/internal/services/postgresflexalpha/instance/resource.go @@ -328,10 +328,6 @@ func (r *instanceResource) Read( ctx = core.InitProviderContext(ctx) - // projectId := model.ProjectId.ValueString() - // region := r.providerData.GetRegionWithOverride(model.Region) - // instanceId := model.InstanceId.ValueString() - var projectId string if !model.ProjectId.IsNull() && !model.ProjectId.IsUnknown() { projectId = model.ProjectId.ValueString() @@ -435,18 +431,6 @@ func (r *instanceResource) Update( return } - // if model.InstanceId.IsNull() || model.InstanceId.IsUnknown() { - // core.LogAndAddError(ctx, &resp.Diagnostics, "Error updating instance", "instanceId is null or unknown") - // return - //} - // - // if model.ProjectId.IsNull() || model.ProjectId.IsUnknown() { - // core.LogAndAddError(ctx, &resp.Diagnostics, "Error updating instance", "projectId is null or unknown") - // return - //} - - // projectId := model.ProjectId.ValueString() - // instanceId := model.InstanceId.ValueString() projectId := identityData.ProjectID.ValueString() instanceId := identityData.InstanceID.ValueString() region := model.Region.ValueString() diff --git a/stackit/internal/services/postgresflexalpha/instance/resources_gen/instance_resource_gen.go b/stackit/internal/services/postgresflexalpha/instance/resources_gen/instance_resource_gen.go index 35d31cbc..25ad092b 100644 --- a/stackit/internal/services/postgresflexalpha/instance/resources_gen/instance_resource_gen.go +++ b/stackit/internal/services/postgresflexalpha/instance/resources_gen/instance_resource_gen.go @@ -35,15 +35,27 @@ func InstanceResourceSchema(ctx context.Context) schema.Schema { }, "connection_info": schema.SingleNestedAttribute{ Attributes: map[string]schema.Attribute{ - "host": schema.StringAttribute{ + "write": schema.SingleNestedAttribute{ + Attributes: map[string]schema.Attribute{ + "host": schema.StringAttribute{ + Computed: true, + Description: "The host of the instance.", + MarkdownDescription: "The host of the instance.", + }, + "port": schema.Int64Attribute{ + Computed: true, + Description: "The port of the instance.", + MarkdownDescription: "The port of the instance.", + }, + }, + CustomType: WriteType{ + ObjectType: types.ObjectType{ + AttrTypes: WriteValue{}.AttributeTypes(ctx), + }, + }, Computed: true, - Description: "The host of the instance.", - MarkdownDescription: "The host of the instance.", - }, - "port": schema.Int64Attribute{ - Computed: true, - Description: "The port of the instance.", - MarkdownDescription: "The port of the instance.", + Description: "The DNS name and port in the instance overview", + MarkdownDescription: "The DNS name and port in the instance overview", }, }, CustomType: ConnectionInfoType{ @@ -52,8 +64,8 @@ func InstanceResourceSchema(ctx context.Context) schema.Schema { }, }, Computed: true, - Description: "The DNS name and port in the instance overview", - MarkdownDescription: "The DNS name and port in the instance overview", + Description: "The connection information of the instance", + MarkdownDescription: "The connection information of the instance", }, "encryption": schema.SingleNestedAttribute{ Attributes: map[string]schema.Attribute{ @@ -263,40 +275,22 @@ func (t ConnectionInfoType) ValueFromObject(ctx context.Context, in basetypes.Ob attributes := in.Attributes() - hostAttribute, ok := attributes["host"] + writeAttribute, ok := attributes["write"] if !ok { diags.AddError( "Attribute Missing", - `host is missing from object`) + `write is missing from object`) return nil, diags } - hostVal, ok := hostAttribute.(basetypes.StringValue) + writeVal, ok := writeAttribute.(basetypes.ObjectValue) if !ok { diags.AddError( "Attribute Wrong Type", - fmt.Sprintf(`host expected to be basetypes.StringValue, was: %T`, hostAttribute)) - } - - portAttribute, ok := attributes["port"] - - if !ok { - diags.AddError( - "Attribute Missing", - `port is missing from object`) - - return nil, diags - } - - portVal, ok := portAttribute.(basetypes.Int64Value) - - if !ok { - diags.AddError( - "Attribute Wrong Type", - fmt.Sprintf(`port expected to be basetypes.Int64Value, was: %T`, portAttribute)) + fmt.Sprintf(`write expected to be basetypes.ObjectValue, was: %T`, writeAttribute)) } if diags.HasError() { @@ -304,8 +298,7 @@ func (t ConnectionInfoType) ValueFromObject(ctx context.Context, in basetypes.Ob } return ConnectionInfoValue{ - Host: hostVal, - Port: portVal, + Write: writeVal, state: attr.ValueStateKnown, }, diags } @@ -373,40 +366,22 @@ func NewConnectionInfoValue(attributeTypes map[string]attr.Type, attributes map[ return NewConnectionInfoValueUnknown(), diags } - hostAttribute, ok := attributes["host"] + writeAttribute, ok := attributes["write"] if !ok { diags.AddError( "Attribute Missing", - `host is missing from object`) + `write is missing from object`) return NewConnectionInfoValueUnknown(), diags } - hostVal, ok := hostAttribute.(basetypes.StringValue) + writeVal, ok := writeAttribute.(basetypes.ObjectValue) if !ok { diags.AddError( "Attribute Wrong Type", - fmt.Sprintf(`host expected to be basetypes.StringValue, was: %T`, hostAttribute)) - } - - portAttribute, ok := attributes["port"] - - if !ok { - diags.AddError( - "Attribute Missing", - `port is missing from object`) - - return NewConnectionInfoValueUnknown(), diags - } - - portVal, ok := portAttribute.(basetypes.Int64Value) - - if !ok { - diags.AddError( - "Attribute Wrong Type", - fmt.Sprintf(`port expected to be basetypes.Int64Value, was: %T`, portAttribute)) + fmt.Sprintf(`write expected to be basetypes.ObjectValue, was: %T`, writeAttribute)) } if diags.HasError() { @@ -414,8 +389,7 @@ func NewConnectionInfoValue(attributeTypes map[string]attr.Type, attributes map[ } return ConnectionInfoValue{ - Host: hostVal, - Port: portVal, + Write: writeVal, state: attr.ValueStateKnown, }, diags } @@ -488,12 +462,401 @@ func (t ConnectionInfoType) ValueType(ctx context.Context) attr.Value { var _ basetypes.ObjectValuable = ConnectionInfoValue{} type ConnectionInfoValue struct { + Write basetypes.ObjectValue `tfsdk:"write"` + state attr.ValueState +} + +func (v ConnectionInfoValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) { + attrTypes := make(map[string]tftypes.Type, 1) + + var val tftypes.Value + var err error + + attrTypes["write"] = basetypes.ObjectType{ + AttrTypes: WriteValue{}.AttributeTypes(ctx), + }.TerraformType(ctx) + + objectType := tftypes.Object{AttributeTypes: attrTypes} + + switch v.state { + case attr.ValueStateKnown: + vals := make(map[string]tftypes.Value, 1) + + val, err = v.Write.ToTerraformValue(ctx) + + if err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + vals["write"] = val + + if err := tftypes.ValidateValue(objectType, vals); err != nil { + return tftypes.NewValue(objectType, tftypes.UnknownValue), err + } + + return tftypes.NewValue(objectType, vals), nil + case attr.ValueStateNull: + return tftypes.NewValue(objectType, nil), nil + case attr.ValueStateUnknown: + return tftypes.NewValue(objectType, tftypes.UnknownValue), nil + default: + panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state)) + } +} + +func (v ConnectionInfoValue) IsNull() bool { + return v.state == attr.ValueStateNull +} + +func (v ConnectionInfoValue) IsUnknown() bool { + return v.state == attr.ValueStateUnknown +} + +func (v ConnectionInfoValue) String() string { + return "ConnectionInfoValue" +} + +func (v ConnectionInfoValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) { + var diags diag.Diagnostics + + var write basetypes.ObjectValue + + if v.Write.IsNull() { + write = types.ObjectNull( + WriteValue{}.AttributeTypes(ctx), + ) + } + + if v.Write.IsUnknown() { + write = types.ObjectUnknown( + WriteValue{}.AttributeTypes(ctx), + ) + } + + if !v.Write.IsNull() && !v.Write.IsUnknown() { + write = types.ObjectValueMust( + WriteValue{}.AttributeTypes(ctx), + v.Write.Attributes(), + ) + } + + attributeTypes := map[string]attr.Type{ + "write": basetypes.ObjectType{ + AttrTypes: WriteValue{}.AttributeTypes(ctx), + }, + } + + if v.IsNull() { + return types.ObjectNull(attributeTypes), diags + } + + if v.IsUnknown() { + return types.ObjectUnknown(attributeTypes), diags + } + + objVal, diags := types.ObjectValue( + attributeTypes, + map[string]attr.Value{ + "write": write, + }) + + return objVal, diags +} + +func (v ConnectionInfoValue) Equal(o attr.Value) bool { + other, ok := o.(ConnectionInfoValue) + + if !ok { + return false + } + + if v.state != other.state { + return false + } + + if v.state != attr.ValueStateKnown { + return true + } + + if !v.Write.Equal(other.Write) { + return false + } + + return true +} + +func (v ConnectionInfoValue) Type(ctx context.Context) attr.Type { + return ConnectionInfoType{ + basetypes.ObjectType{ + AttrTypes: v.AttributeTypes(ctx), + }, + } +} + +func (v ConnectionInfoValue) AttributeTypes(ctx context.Context) map[string]attr.Type { + return map[string]attr.Type{ + "write": basetypes.ObjectType{ + AttrTypes: WriteValue{}.AttributeTypes(ctx), + }, + } +} + +var _ basetypes.ObjectTypable = WriteType{} + +type WriteType struct { + basetypes.ObjectType +} + +func (t WriteType) Equal(o attr.Type) bool { + other, ok := o.(WriteType) + + if !ok { + return false + } + + return t.ObjectType.Equal(other.ObjectType) +} + +func (t WriteType) String() string { + return "WriteType" +} + +func (t WriteType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) { + var diags diag.Diagnostics + + attributes := in.Attributes() + + hostAttribute, ok := attributes["host"] + + if !ok { + diags.AddError( + "Attribute Missing", + `host is missing from object`) + + return nil, diags + } + + hostVal, ok := hostAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`host expected to be basetypes.StringValue, was: %T`, hostAttribute)) + } + + portAttribute, ok := attributes["port"] + + if !ok { + diags.AddError( + "Attribute Missing", + `port is missing from object`) + + return nil, diags + } + + portVal, ok := portAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`port expected to be basetypes.Int64Value, was: %T`, portAttribute)) + } + + if diags.HasError() { + return nil, diags + } + + return WriteValue{ + Host: hostVal, + Port: portVal, + state: attr.ValueStateKnown, + }, diags +} + +func NewWriteValueNull() WriteValue { + return WriteValue{ + state: attr.ValueStateNull, + } +} + +func NewWriteValueUnknown() WriteValue { + return WriteValue{ + state: attr.ValueStateUnknown, + } +} + +func NewWriteValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (WriteValue, diag.Diagnostics) { + var diags diag.Diagnostics + + // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521 + ctx := context.Background() + + for name, attributeType := range attributeTypes { + attribute, ok := attributes[name] + + if !ok { + diags.AddError( + "Missing WriteValue Attribute Value", + "While creating a WriteValue value, a missing attribute value was detected. "+ + "A WriteValue must contain values for all attributes, even if null or unknown. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("WriteValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()), + ) + + continue + } + + if !attributeType.Equal(attribute.Type(ctx)) { + diags.AddError( + "Invalid WriteValue Attribute Type", + "While creating a WriteValue value, an invalid attribute value was detected. "+ + "A WriteValue must use a matching attribute type for the value. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("WriteValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+ + fmt.Sprintf("WriteValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)), + ) + } + } + + for name := range attributes { + _, ok := attributeTypes[name] + + if !ok { + diags.AddError( + "Extra WriteValue Attribute Value", + "While creating a WriteValue value, an extra attribute value was detected. "+ + "A WriteValue must not contain values beyond the expected attribute types. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("Extra WriteValue Attribute Name: %s", name), + ) + } + } + + if diags.HasError() { + return NewWriteValueUnknown(), diags + } + + hostAttribute, ok := attributes["host"] + + if !ok { + diags.AddError( + "Attribute Missing", + `host is missing from object`) + + return NewWriteValueUnknown(), diags + } + + hostVal, ok := hostAttribute.(basetypes.StringValue) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`host expected to be basetypes.StringValue, was: %T`, hostAttribute)) + } + + portAttribute, ok := attributes["port"] + + if !ok { + diags.AddError( + "Attribute Missing", + `port is missing from object`) + + return NewWriteValueUnknown(), diags + } + + portVal, ok := portAttribute.(basetypes.Int64Value) + + if !ok { + diags.AddError( + "Attribute Wrong Type", + fmt.Sprintf(`port expected to be basetypes.Int64Value, was: %T`, portAttribute)) + } + + if diags.HasError() { + return NewWriteValueUnknown(), diags + } + + return WriteValue{ + Host: hostVal, + Port: portVal, + state: attr.ValueStateKnown, + }, diags +} + +func NewWriteValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) WriteValue { + object, diags := NewWriteValue(attributeTypes, attributes) + + if diags.HasError() { + // This could potentially be added to the diag package. + diagsStrings := make([]string, 0, len(diags)) + + for _, diagnostic := range diags { + diagsStrings = append(diagsStrings, fmt.Sprintf( + "%s | %s | %s", + diagnostic.Severity(), + diagnostic.Summary(), + diagnostic.Detail())) + } + + panic("NewWriteValueMust received error(s): " + strings.Join(diagsStrings, "\n")) + } + + return object +} + +func (t WriteType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) { + if in.Type() == nil { + return NewWriteValueNull(), nil + } + + if !in.Type().Equal(t.TerraformType(ctx)) { + return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type()) + } + + if !in.IsKnown() { + return NewWriteValueUnknown(), nil + } + + if in.IsNull() { + return NewWriteValueNull(), nil + } + + attributes := map[string]attr.Value{} + + val := map[string]tftypes.Value{} + + err := in.As(&val) + + if err != nil { + return nil, err + } + + for k, v := range val { + a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v) + + if err != nil { + return nil, err + } + + attributes[k] = a + } + + return NewWriteValueMust(WriteValue{}.AttributeTypes(ctx), attributes), nil +} + +func (t WriteType) ValueType(ctx context.Context) attr.Value { + return WriteValue{} +} + +var _ basetypes.ObjectValuable = WriteValue{} + +type WriteValue struct { Host basetypes.StringValue `tfsdk:"host"` Port basetypes.Int64Value `tfsdk:"port"` state attr.ValueState } -func (v ConnectionInfoValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) { +func (v WriteValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) { attrTypes := make(map[string]tftypes.Type, 2) var val tftypes.Value @@ -538,19 +901,19 @@ func (v ConnectionInfoValue) ToTerraformValue(ctx context.Context) (tftypes.Valu } } -func (v ConnectionInfoValue) IsNull() bool { +func (v WriteValue) IsNull() bool { return v.state == attr.ValueStateNull } -func (v ConnectionInfoValue) IsUnknown() bool { +func (v WriteValue) IsUnknown() bool { return v.state == attr.ValueStateUnknown } -func (v ConnectionInfoValue) String() string { - return "ConnectionInfoValue" +func (v WriteValue) String() string { + return "WriteValue" } -func (v ConnectionInfoValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) { +func (v WriteValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) { var diags diag.Diagnostics attributeTypes := map[string]attr.Type{ @@ -576,8 +939,8 @@ func (v ConnectionInfoValue) ToObjectValue(ctx context.Context) (basetypes.Objec return objVal, diags } -func (v ConnectionInfoValue) Equal(o attr.Value) bool { - other, ok := o.(ConnectionInfoValue) +func (v WriteValue) Equal(o attr.Value) bool { + other, ok := o.(WriteValue) if !ok { return false @@ -602,15 +965,15 @@ func (v ConnectionInfoValue) Equal(o attr.Value) bool { return true } -func (v ConnectionInfoValue) Type(ctx context.Context) attr.Type { - return ConnectionInfoType{ +func (v WriteValue) Type(ctx context.Context) attr.Type { + return WriteType{ basetypes.ObjectType{ AttrTypes: v.AttributeTypes(ctx), }, } } -func (v ConnectionInfoValue) AttributeTypes(ctx context.Context) map[string]attr.Type { +func (v WriteValue) AttributeTypes(ctx context.Context) map[string]attr.Type { return map[string]attr.Type{ "host": basetypes.StringType{}, "port": basetypes.Int64Type{}, diff --git a/stackit/internal/services/postgresflexalpha/postgresflex_acc_test.go b/stackit/internal/services/postgresflexalpha/postgresflex_acc_test.go index 7e71a32f..d5ffd00c 100644 --- a/stackit/internal/services/postgresflexalpha/postgresflex_acc_test.go +++ b/stackit/internal/services/postgresflexalpha/postgresflex_acc_test.go @@ -30,39 +30,46 @@ var testInstances []string func init() { sweeperName := fmt.Sprintf("%s_%s", pfx, "sweeper") - resource.AddTestSweepers(sweeperName, &resource.Sweeper{ - Name: sweeperName, - F: func(region string) error { - ctx := context.Background() - apiClientConfigOptions := []config.ConfigurationOption{} - apiClient, err := postgresflexalpha2.NewAPIClient(apiClientConfigOptions...) - if err != nil { - log.Fatalln(err) - } + resource.AddTestSweepers( + sweeperName, &resource.Sweeper{ + Name: sweeperName, + F: func(_ string) error { // region is passed by the testing framework + ctx := context.Background() + apiClientConfigOptions := []config.ConfigurationOption{} + apiClient, err := postgresflexalpha2.NewAPIClient(apiClientConfigOptions...) + if err != nil { + log.Fatalln(err) + } - instances, err := apiClient.ListInstancesRequest(ctx, testutils.ProjectId, testutils.Region). - Size(100). - Execute() - if err != nil { - log.Fatalln(err) - } + instances, err := apiClient.ListInstancesRequest(ctx, testutils.ProjectId, testutils.Region). + Size(100). + Execute() + if err != nil { + log.Fatalln(err) + } - for _, inst := range instances.GetInstances() { - if strings.HasPrefix(inst.GetName(), "tf-acc-") { - for _, item := range testInstances { - if inst.GetName() == item { - delErr := apiClient.DeleteInstanceRequestExecute(ctx, testutils.ProjectId, testutils.Region, inst.GetId()) - if delErr != nil { - // TODO: maybe just warn? - log.Fatalln(delErr) + for _, inst := range instances.GetInstances() { + if strings.HasPrefix(inst.GetName(), "tf-acc-") { + for _, item := range testInstances { + if inst.GetName() == item { + delErr := apiClient.DeleteInstanceRequestExecute( + ctx, + testutils.ProjectId, + testutils.Region, + inst.GetId(), + ) + if delErr != nil { + // TODO: maybe just warn? + log.Fatalln(delErr) + } } } } } - } - return nil + return nil + }, }, - }) + ) } func TestInstanceResourceSchema(t *testing.T) { @@ -195,57 +202,67 @@ func TestAccInstance(t *testing.T) { updSizeData := exData updSizeData.Size = 25 - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { - testAccPreCheck(t) - t.Logf(" ... working on instance %s", exData.TfName) - testInstances = append(testInstances, exData.TfName) - }, - ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories, - Steps: []resource.TestStep{ - // Create and verify - { - Config: testutils.StringFromTemplateMust( - "testdata/instance_template.gompl", - exData, - ), - Check: resource.ComposeAggregateTestCheckFunc( - resource.TestCheckResourceAttr(testutils.ResStr(pfx, "instance", exData.TfName), "name", exData.Name), - resource.TestCheckResourceAttrSet(testutils.ResStr(pfx, "instance", exData.TfName), "id"), - ), + resource.ParallelTest( + t, resource.TestCase{ + PreCheck: func() { + testAccPreCheck(t) + t.Logf(" ... working on instance %s", exData.TfName) + testInstances = append(testInstances, exData.TfName) }, - // Update name and verify - { - Config: testutils.StringFromTemplateMust( - "testdata/instance_template.gompl", - updNameData, - ), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(testutils.ResStr(pfx, "instance", exData.TfName), "name", updNameData.Name), - ), - }, - // Update size and verify - { - Config: testutils.StringFromTemplateMust( - "testdata/instance_template.gompl", - updSizeData, - ), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - testutils.ResStr(pfx, "instance", exData.TfName), - "storage.size", - strconv.Itoa(int(updSizeData.Size)), + ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories, + Steps: []resource.TestStep{ + // Create and verify + { + Config: testutils.StringFromTemplateMust( + "testdata/instance_template.gompl", + exData, ), - ), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr( + testutils.ResStr(pfx, "instance", exData.TfName), + "name", + exData.Name, + ), + resource.TestCheckResourceAttrSet(testutils.ResStr(pfx, "instance", exData.TfName), "id"), + ), + }, + // Update name and verify + { + Config: testutils.StringFromTemplateMust( + "testdata/instance_template.gompl", + updNameData, + ), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + testutils.ResStr(pfx, "instance", exData.TfName), + "name", + updNameData.Name, + ), + ), + }, + // Update size and verify + { + Config: testutils.StringFromTemplateMust( + "testdata/instance_template.gompl", + updSizeData, + ), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + testutils.ResStr(pfx, "instance", exData.TfName), + "storage.size", + strconv.Itoa(int(updSizeData.Size)), + ), + ), + }, + //// Import test + //{ + // ResourceName: "example_resource.test", + // ImportState: true, + // ImportStateVerify: true, + // }, }, - //// Import test - //{ - // ResourceName: "example_resource.test", - // ImportState: true, - // ImportStateVerify: true, - // }, }, - }) + ) } func TestAccInstanceWithUsers(t *testing.T) { @@ -260,29 +277,35 @@ func TestAccInstanceWithUsers(t *testing.T) { }, } - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { - testAccPreCheck(t) - t.Logf(" ... working on instance %s", data.TfName) - testInstances = append(testInstances, data.TfName) - }, - ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories, - Steps: []resource.TestStep{ - // Create and verify - { - Config: testutils.StringFromTemplateMust( - "testdata/instance_template.gompl", - data, - ), - Check: resource.ComposeAggregateTestCheckFunc( - resource.TestCheckResourceAttr(testutils.ResStr(pfx, "instance", data.TfName), "name", data.Name), - resource.TestCheckResourceAttrSet(testutils.ResStr(pfx, "instance", data.TfName), "id"), - resource.TestCheckResourceAttr(testutils.ResStr(pfx, "user", userName), "name", userName), - resource.TestCheckResourceAttrSet(testutils.ResStr(pfx, "user", userName), "id"), - ), + resource.ParallelTest( + t, resource.TestCase{ + PreCheck: func() { + testAccPreCheck(t) + t.Logf(" ... working on instance %s", data.TfName) + testInstances = append(testInstances, data.TfName) + }, + ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories, + Steps: []resource.TestStep{ + // Create and verify + { + Config: testutils.StringFromTemplateMust( + "testdata/instance_template.gompl", + data, + ), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr( + testutils.ResStr(pfx, "instance", data.TfName), + "name", + data.Name, + ), + resource.TestCheckResourceAttrSet(testutils.ResStr(pfx, "instance", data.TfName), "id"), + resource.TestCheckResourceAttr(testutils.ResStr(pfx, "user", userName), "name", userName), + resource.TestCheckResourceAttrSet(testutils.ResStr(pfx, "user", userName), "id"), + ), + }, }, }, - }) + ) } func TestAccInstanceWithDatabases(t *testing.T) { @@ -306,32 +329,38 @@ func TestAccInstanceWithDatabases(t *testing.T) { }, } - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { - testAccPreCheck(t) - t.Logf(" ... working on instance %s", data.TfName) - testInstances = append(testInstances, data.TfName) - }, - ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories, - Steps: []resource.TestStep{ - // Create and verify - { - Config: testutils.StringFromTemplateMust( - "testdata/instance_template.gompl", - data, - ), - Check: resource.ComposeAggregateTestCheckFunc( - resource.TestCheckResourceAttr(testutils.ResStr(pfx, "instance", data.TfName), "name", data.Name), - resource.TestCheckResourceAttrSet(testutils.ResStr(pfx, "instance", data.TfName), "id"), - resource.TestCheckResourceAttr(testutils.ResStr(pfx, "user", userName), "name", userName), - resource.TestCheckResourceAttrSet(testutils.ResStr(pfx, "user", userName), "id"), - resource.TestCheckResourceAttr(testutils.ResStr(pfx, "database", dbName), "name", dbName), - resource.TestCheckResourceAttr(testutils.ResStr(pfx, "database", dbName), "owner", userName), - resource.TestCheckResourceAttrSet(testutils.ResStr(pfx, "database", dbName), "id"), - ), + resource.ParallelTest( + t, resource.TestCase{ + PreCheck: func() { + testAccPreCheck(t) + t.Logf(" ... working on instance %s", data.TfName) + testInstances = append(testInstances, data.TfName) + }, + ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories, + Steps: []resource.TestStep{ + // Create and verify + { + Config: testutils.StringFromTemplateMust( + "testdata/instance_template.gompl", + data, + ), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr( + testutils.ResStr(pfx, "instance", data.TfName), + "name", + data.Name, + ), + resource.TestCheckResourceAttrSet(testutils.ResStr(pfx, "instance", data.TfName), "id"), + resource.TestCheckResourceAttr(testutils.ResStr(pfx, "user", userName), "name", userName), + resource.TestCheckResourceAttrSet(testutils.ResStr(pfx, "user", userName), "id"), + resource.TestCheckResourceAttr(testutils.ResStr(pfx, "database", dbName), "name", dbName), + resource.TestCheckResourceAttr(testutils.ResStr(pfx, "database", dbName), "owner", userName), + resource.TestCheckResourceAttrSet(testutils.ResStr(pfx, "database", dbName), "id"), + ), + }, }, }, - }) + ) } // func setupMockServer() *httptest.Server { @@ -461,7 +490,7 @@ func TestAccInstanceWithDatabases(t *testing.T) { // "project_id": testutils.ProjectId, //} // -//func configResources(backupSchedule string, _ *string) string { +// func configResources(backupSchedule string, _ *string) string { // return fmt.Sprintf( // ` // %s @@ -535,7 +564,7 @@ func TestAccInstanceWithDatabases(t *testing.T) { // ) //} // -//func TestAccPostgresFlexFlexResource(t *testing.T) { +// func TestAccPostgresFlexFlexResource(t *testing.T) { // resource.ParallelTest( // t, resource.TestCase{ // ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories, @@ -954,7 +983,7 @@ func TestAccInstanceWithDatabases(t *testing.T) { // ) //} // -//func testAccCheckPostgresFlexDestroy(s *terraform.State) error { +// func testAccCheckPostgresFlexDestroy(s *terraform.State) error { // ctx := context.Background() // var client *postgresflex.APIClient // var err error diff --git a/stackit/internal/services/postgresflexalpha/user/resource.go b/stackit/internal/services/postgresflexalpha/user/resource.go index cf9fd389..ab7ec563 100644 --- a/stackit/internal/services/postgresflexalpha/user/resource.go +++ b/stackit/internal/services/postgresflexalpha/user/resource.go @@ -252,7 +252,6 @@ func (r *userResource) Create( model.UserId = types.Int64Value(id) model.Password = types.StringValue(userResp.GetPassword()) model.Status = types.StringValue(userResp.GetStatus()) - //model.ConnectionString = types.StringValue(userResp.GetConnectionString()) waitResp, err := postgresflexalphaWait.GetUserByIdWaitHandler( ctx, diff --git a/stackit/internal/services/sqlserverflexalpha/database/datasource.go b/stackit/internal/services/sqlserverflexalpha/database/datasource.go index 3c40d6b0..5155b41c 100644 --- a/stackit/internal/services/sqlserverflexalpha/database/datasource.go +++ b/stackit/internal/services/sqlserverflexalpha/database/datasource.go @@ -143,7 +143,6 @@ func (d *databaseDataSource) Read(ctx context.Context, req datasource.ReadReques resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) tflog.Info(ctx, "SQL Server Flex beta database read") - } // handleReadError centralizes API error handling for the Read operation. diff --git a/stackit/internal/services/sqlserverflexalpha/database/resource.go b/stackit/internal/services/sqlserverflexalpha/database/resource.go index 72e8105f..5b46c52c 100644 --- a/stackit/internal/services/sqlserverflexalpha/database/resource.go +++ b/stackit/internal/services/sqlserverflexalpha/database/resource.go @@ -36,10 +36,6 @@ var ( // Define errors errDatabaseNotFound = errors.New("database not found") - - // Error message constants - extractErrorSummary = "extracting failed" - extractErrorMessage = "Extracting identity data: %v" ) func NewDatabaseResource() resource.Resource { @@ -186,26 +182,6 @@ func (r *databaseResource) Create(ctx context.Context, req resource.CreateReques payLoad.Name = data.Name.ValueStringPointer() payLoad.Owner = data.Owner.ValueStringPointer() - //_, err := wait.WaitForUserWaitHandler( - // ctx, - // r.client, - // projectId, - // instanceId, - // region, - // data.Owner.ValueString(), - //). - // SetSleepBeforeWait(10 * time.Second). - // WaitWithContext(ctx) - //if err != nil { - // core.LogAndAddError( - // ctx, - // &resp.Diagnostics, - // createErr, - // fmt.Sprintf("Calling API: %v", err), - // ) - // return - //} - createResp, err := r.client.CreateDatabaseRequest(ctx, projectId, region, instanceId). CreateDatabaseRequestPayload(payLoad). Execute() @@ -451,7 +427,9 @@ func (r *databaseResource) Delete(ctx context.Context, req resource.DeleteReques &resp.Diagnostics, "Error deleting database", fmt.Sprintf( - "Calling API: %v\nname: %s, region: %s, instanceId: %s", err, databaseName, region, instanceId)) + "Calling API: %v\nname: %s, region: %s, instanceId: %s", err, databaseName, region, instanceId, + ), + ) return } @@ -468,7 +446,6 @@ func (r *databaseResource) ModifyPlan( req resource.ModifyPlanRequest, resp *resource.ModifyPlanResponse, ) { // nolint:gocritic // function signature required by Terraform - // skip initial empty configuration to avoid follow-up errors if req.Config.Raw.IsNull() { return diff --git a/stackit/internal/services/sqlserverflexalpha/instance/functions.go b/stackit/internal/services/sqlserverflexalpha/instance/functions.go index 793b7e23..a8567903 100644 --- a/stackit/internal/services/sqlserverflexalpha/instance/functions.go +++ b/stackit/internal/services/sqlserverflexalpha/instance/functions.go @@ -247,7 +247,6 @@ func toCreatePayload( conversion.StringValueToPointer(model.Version), ), }, nil - } func toUpdatePayload( diff --git a/stackit/internal/services/sqlserverflexalpha/instance/resource.go b/stackit/internal/services/sqlserverflexalpha/instance/resource.go index 6adf07df..3b1f4fd3 100644 --- a/stackit/internal/services/sqlserverflexalpha/instance/resource.go +++ b/stackit/internal/services/sqlserverflexalpha/instance/resource.go @@ -54,7 +54,7 @@ type InstanceResourceIdentityModel struct { } func (r *instanceResource) Metadata( - ctx context.Context, + _ context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse, ) { @@ -64,7 +64,7 @@ func (r *instanceResource) Metadata( //go:embed planModifiers.yaml var modifiersFileByte []byte -func (r *instanceResource) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) { +func (r *instanceResource) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) { s := sqlserverflexalphaResGen.InstanceResourceSchema(ctx) fields, err := utils.ReadModifiersConfig(modifiersFileByte) @@ -147,7 +147,6 @@ func (r *instanceResource) ModifyPlan( req resource.ModifyPlanRequest, resp *resource.ModifyPlanResponse, ) { // nolint:gocritic // function signature required by Terraform - // skip initial empty configuration to avoid follow-up errors if req.Config.Raw.IsNull() { return diff --git a/stackit/internal/services/sqlserverflexalpha/sqlserverflex_acc_test.go b/stackit/internal/services/sqlserverflexalpha/sqlserverflex_acc_test.go index 8658f0ce..9eebac99 100644 --- a/stackit/internal/services/sqlserverflexalpha/sqlserverflex_acc_test.go +++ b/stackit/internal/services/sqlserverflexalpha/sqlserverflex_acc_test.go @@ -288,8 +288,8 @@ func TestAccInstanceNoEncryption(t *testing.T) { // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption"), // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_id"), // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_version"), - //resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_ring_id"), - //resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.service_account"), + // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_ring_id"), + // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.service_account"), // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.access_scope"), // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.acl"), @@ -389,8 +389,8 @@ func TestAccInstanceEncryption(t *testing.T) { // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption"), // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_id"), // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_version"), - //resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_ring_id"), - //resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.service_account"), + // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_ring_id"), + // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.service_account"), // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.access_scope"), // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.acl"), diff --git a/stackit/internal/services/sqlserverflexalpha/user/datasource.go b/stackit/internal/services/sqlserverflexalpha/user/datasource.go index 64efb4a3..e191e5a7 100644 --- a/stackit/internal/services/sqlserverflexalpha/user/datasource.go +++ b/stackit/internal/services/sqlserverflexalpha/user/datasource.go @@ -20,8 +20,6 @@ import ( var _ datasource.DataSource = (*userDataSource)(nil) -const errorPrefix = "[sqlserverflexalpha - User]" - func NewUserDataSource() datasource.DataSource { return &userDataSource{} } diff --git a/stackit/internal/services/sqlserverflexalpha/user/resource.go b/stackit/internal/services/sqlserverflexalpha/user/resource.go index 8a0c1df7..ee322fab 100644 --- a/stackit/internal/services/sqlserverflexalpha/user/resource.go +++ b/stackit/internal/services/sqlserverflexalpha/user/resource.go @@ -20,7 +20,6 @@ import ( "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" - sqlserverflexalphagen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/user/resources_gen" sqlserverflexalphaUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/utils" sqlserverflexalphaWait "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/wait/sqlserverflexalpha" @@ -59,7 +58,7 @@ type userResource struct { providerData core.ProviderData } -func (r *userResource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) { +func (r *userResource) Metadata(_ context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) { resp.TypeName = req.ProviderTypeName + "_sqlserverflexalpha_user" } @@ -118,7 +117,7 @@ var modifiersFileByte []byte // Schema defines the schema for the resource. func (r *userResource) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) { - s := sqlserverflexalphagen.UserResourceSchema(ctx) + s := sqlserverflexalphaResGen.UserResourceSchema(ctx) fields, err := utils.ReadModifiersConfig(modifiersFileByte) if err != nil { @@ -470,7 +469,6 @@ func (r *userResource) Delete( // Delete existing record set _, err = sqlserverflexalphaWait.DeleteUserWaitHandler(ctx, r.client, projectId, region, instanceId, userId). WaitWithContext(ctx) - // err := r.client.DeleteUserRequest(ctx, arg.projectId, arg.region, arg.instanceId, userId).Execute() if err != nil { core.LogAndAddError(ctx, &resp.Diagnostics, "User Delete Error", fmt.Sprintf("Calling API: %v", err)) return diff --git a/stackit/internal/services/sqlserverflexbeta/database/datasource.go b/stackit/internal/services/sqlserverflexbeta/database/datasource.go index 401df291..c6fa31bf 100644 --- a/stackit/internal/services/sqlserverflexbeta/database/datasource.go +++ b/stackit/internal/services/sqlserverflexbeta/database/datasource.go @@ -143,7 +143,6 @@ func (d *databaseDataSource) Read(ctx context.Context, req datasource.ReadReques resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) tflog.Info(ctx, "SQL Server Flex beta database read") - } // handleReadError centralizes API error handling for the Read operation. diff --git a/stackit/internal/services/sqlserverflexbeta/database/resource.go b/stackit/internal/services/sqlserverflexbeta/database/resource.go index 99713dcf..9862ca57 100644 --- a/stackit/internal/services/sqlserverflexbeta/database/resource.go +++ b/stackit/internal/services/sqlserverflexbeta/database/resource.go @@ -36,10 +36,6 @@ var ( // Define errors errDatabaseNotFound = errors.New("database not found") - - // Error message constants - extractErrorSummary = "extracting failed" - extractErrorMessage = "Extracting identity data: %v" ) func NewDatabaseResource() resource.Resource { @@ -430,7 +426,9 @@ func (r *databaseResource) Delete(ctx context.Context, req resource.DeleteReques &resp.Diagnostics, "Error deleting database", fmt.Sprintf( - "Calling API: %v\nname: %s, region: %s, instanceId: %s", err, databaseName, region, instanceId)) + "Calling API: %v\nname: %s, region: %s, instanceId: %s", err, databaseName, region, instanceId, + ), + ) return } @@ -449,7 +447,6 @@ func (r *databaseResource) ModifyPlan( req resource.ModifyPlanRequest, resp *resource.ModifyPlanResponse, ) { // nolint:gocritic // function signature required by Terraform - // skip initial empty configuration to avoid follow-up errors if req.Config.Raw.IsNull() { return @@ -559,45 +556,4 @@ func (r *databaseResource) ImportState( tflog.Info(ctx, "Sqlserverflexbeta database state imported") } -// extractIdentityData extracts essential identifiers from the resource model, falling back to the identity model. -func (r *databaseResource) extractIdentityData( - model resourceModel, - identity DatabaseResourceIdentityModel, -) (projectId, region, instanceId, databaseName string, err error) { - if !model.Name.IsNull() && !model.Name.IsUnknown() { - databaseName = model.Name.ValueString() - } else { - if identity.DatabaseName.IsNull() || identity.DatabaseName.IsUnknown() { - return "", "", "", "", fmt.Errorf("database_name not found in config") - } - databaseName = identity.DatabaseName.ValueString() - } - - if !model.ProjectId.IsNull() && !model.ProjectId.IsUnknown() { - projectId = model.ProjectId.ValueString() - } else { - if identity.ProjectID.IsNull() || identity.ProjectID.IsUnknown() { - return "", "", "", "", fmt.Errorf("project_id not found in config") - } - projectId = identity.ProjectID.ValueString() - } - - if !model.Region.IsNull() && !model.Region.IsUnknown() { - region = r.providerData.GetRegionWithOverride(model.Region) - } else { - if identity.Region.IsNull() || identity.Region.IsUnknown() { - return "", "", "", "", fmt.Errorf("region not found in config") - } - region = r.providerData.GetRegionWithOverride(identity.Region) - } - - if !model.InstanceId.IsNull() && !model.InstanceId.IsUnknown() { - instanceId = model.InstanceId.ValueString() - } else { - if identity.InstanceID.IsNull() || identity.InstanceID.IsUnknown() { - return "", "", "", "", fmt.Errorf("instance_id not found in config") - } - instanceId = identity.InstanceID.ValueString() - } - return projectId, region, instanceId, databaseName, nil -} +// extractIdentityData extracts essential identifiers from the resource model, falling back to the identity mode diff --git a/stackit/internal/services/sqlserverflexbeta/instance/functions.go b/stackit/internal/services/sqlserverflexbeta/instance/functions.go index cd18314a..77791ee6 100644 --- a/stackit/internal/services/sqlserverflexbeta/instance/functions.go +++ b/stackit/internal/services/sqlserverflexbeta/instance/functions.go @@ -236,7 +236,6 @@ func toCreatePayload( conversion.StringValueToPointer(model.Version), ), }, nil - } func toUpdatePayload( diff --git a/stackit/internal/services/sqlserverflexbeta/instance/functions_test.go b/stackit/internal/services/sqlserverflexbeta/instance/functions_test.go index bf36c40e..0c6f6147 100644 --- a/stackit/internal/services/sqlserverflexbeta/instance/functions_test.go +++ b/stackit/internal/services/sqlserverflexbeta/instance/functions_test.go @@ -11,6 +11,7 @@ import ( "github.com/hashicorp/terraform-plugin-framework/resource" "github.com/hashicorp/terraform-plugin-framework/types" "github.com/stackitcloud/stackit-sdk-go/core/utils" + sqlserverflexbetaPkgGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta" sqlserverflexbetaRs "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/instance/resources_gen" ) @@ -28,11 +29,13 @@ func Test_handleDSEncryption(t *testing.T) { // TODO: Add test cases. } for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - if got := handleDSEncryption(tt.args.m, tt.args.resp); !reflect.DeepEqual(got, tt.want) { - t.Errorf("handleDSEncryption() = %v, want %v", got, tt.want) - } - }) + t.Run( + tt.name, func(t *testing.T) { + if got := handleDSEncryption(t.Context(), tt.args.m, tt.args.resp); !reflect.DeepEqual(got, tt.want) { + t.Errorf("handleDSEncryption() = %v, want %v", got, tt.want) + } + }, + ) } } @@ -86,11 +89,13 @@ func Test_handleEncryption(t *testing.T) { }, } for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - if got := handleEncryption(tt.args.m, tt.args.resp); !reflect.DeepEqual(got, tt.want) { - t.Errorf("handleEncryption() = %v, want %v", got, tt.want) - } - }) + t.Run( + tt.name, func(t *testing.T) { + if got := handleEncryption(t.Context(), tt.args.m, tt.args.resp); !reflect.DeepEqual(got, tt.want) { + t.Errorf("handleEncryption() = %v, want %v", got, tt.want) + } + }, + ) } } @@ -109,11 +114,18 @@ func Test_mapDataResponseToModel(t *testing.T) { // TODO: Add test cases. } for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - if err := mapDataResponseToModel(tt.args.ctx, tt.args.resp, tt.args.m, tt.args.tfDiags); (err != nil) != tt.wantErr { - t.Errorf("mapDataResponseToModel() error = %v, wantErr %v", err, tt.wantErr) - } - }) + t.Run( + tt.name, func(t *testing.T) { + if err := mapDataResponseToModel( + tt.args.ctx, + tt.args.resp, + tt.args.m, + tt.args.tfDiags, + ); (err != nil) != tt.wantErr { + t.Errorf("mapDataResponseToModel() error = %v, wantErr %v", err, tt.wantErr) + } + }, + ) } } @@ -132,11 +144,18 @@ func Test_mapResponseToModel(t *testing.T) { // TODO: Add test cases. } for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - if err := mapResponseToModel(tt.args.ctx, tt.args.resp, tt.args.m, tt.args.tfDiags); (err != nil) != tt.wantErr { - t.Errorf("mapResponseToModel() error = %v, wantErr %v", err, tt.wantErr) - } - }) + t.Run( + tt.name, func(t *testing.T) { + if err := mapResponseToModel( + tt.args.ctx, + tt.args.resp, + tt.args.m, + tt.args.tfDiags, + ); (err != nil) != tt.wantErr { + t.Errorf("mapResponseToModel() error = %v, wantErr %v", err, tt.wantErr) + } + }, + ) } } @@ -208,19 +227,18 @@ func Test_toCreatePayload(t *testing.T) { }, } for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - got, err := toCreatePayload(tt.args.ctx, tt.args.model) - if (err != nil) != tt.wantErr { - t.Errorf("toCreatePayload() error = %v, wantErr %v", err, tt.wantErr) - return - } - if diff := cmp.Diff(tt.want, got); diff != "" { - t.Errorf("model mismatch (-want +got):\n%s", diff) - } - //if !reflect.DeepEqual(got, tt.want) { - // t.Errorf("toCreatePayload() got = %v, want %v", got, tt.want) - //} - }) + t.Run( + tt.name, func(t *testing.T) { + got, err := toCreatePayload(tt.args.ctx, tt.args.model) + if (err != nil) != tt.wantErr { + t.Errorf("toCreatePayload() error = %v, wantErr %v", err, tt.wantErr) + return + } + if diff := cmp.Diff(tt.want, got); diff != "" { + t.Errorf("model mismatch (-want +got):\n%s", diff) + } + }, + ) } } @@ -239,15 +257,17 @@ func Test_toUpdatePayload(t *testing.T) { // TODO: Add test cases. } for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - got, err := toUpdatePayload(tt.args.ctx, tt.args.m, tt.args.resp) - if (err != nil) != tt.wantErr { - t.Errorf("toUpdatePayload() error = %v, wantErr %v", err, tt.wantErr) - return - } - if !reflect.DeepEqual(got, tt.want) { - t.Errorf("toUpdatePayload() got = %v, want %v", got, tt.want) - } - }) + t.Run( + tt.name, func(t *testing.T) { + got, err := toUpdatePayload(tt.args.ctx, tt.args.m, tt.args.resp) + if (err != nil) != tt.wantErr { + t.Errorf("toUpdatePayload() error = %v, wantErr %v", err, tt.wantErr) + return + } + if !reflect.DeepEqual(got, tt.want) { + t.Errorf("toUpdatePayload() got = %v, want %v", got, tt.want) + } + }, + ) } } diff --git a/stackit/internal/services/sqlserverflexbeta/instance/resource.go b/stackit/internal/services/sqlserverflexbeta/instance/resource.go index a91040bf..044b4b43 100644 --- a/stackit/internal/services/sqlserverflexbeta/instance/resource.go +++ b/stackit/internal/services/sqlserverflexbeta/instance/resource.go @@ -54,7 +54,7 @@ type InstanceResourceIdentityModel struct { } func (r *instanceResource) Metadata( - ctx context.Context, + _ context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse, ) { @@ -64,7 +64,7 @@ func (r *instanceResource) Metadata( //go:embed planModifiers.yaml var modifiersFileByte []byte -func (r *instanceResource) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) { +func (r *instanceResource) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) { s := sqlserverflexbetaResGen.InstanceResourceSchema(ctx) fields, err := utils.ReadModifiersConfig(modifiersFileByte) @@ -147,7 +147,6 @@ func (r *instanceResource) ModifyPlan( req resource.ModifyPlanRequest, resp *resource.ModifyPlanResponse, ) { // nolint:gocritic // function signature required by Terraform - // skip initial empty configuration to avoid follow-up errors if req.Config.Raw.IsNull() { return diff --git a/stackit/internal/services/sqlserverflexbeta/sqlserverflex_acc_test.go b/stackit/internal/services/sqlserverflexbeta/sqlserverflex_acc_test.go index 3beb0da8..887c5edd 100644 --- a/stackit/internal/services/sqlserverflexbeta/sqlserverflex_acc_test.go +++ b/stackit/internal/services/sqlserverflexbeta/sqlserverflex_acc_test.go @@ -349,8 +349,8 @@ func TestAccInstanceNoEncryption(t *testing.T) { // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption"), // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_id"), // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_version"), - //resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_ring_id"), - //resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.service_account"), + // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_ring_id"), + // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.service_account"), // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.access_scope"), // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.acl"), @@ -450,8 +450,8 @@ func TestAccInstanceEncryption(t *testing.T) { // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption"), // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_id"), // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_version"), - //resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_ring_id"), - //resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.service_account"), + // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_ring_id"), + // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.service_account"), // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.access_scope"), // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.acl"), diff --git a/stackit/internal/services/sqlserverflexbeta/user/datasource.go b/stackit/internal/services/sqlserverflexbeta/user/datasource.go index 8457972a..d726bc2b 100644 --- a/stackit/internal/services/sqlserverflexbeta/user/datasource.go +++ b/stackit/internal/services/sqlserverflexbeta/user/datasource.go @@ -20,8 +20,6 @@ import ( var _ datasource.DataSource = (*userDataSource)(nil) -const errorPrefix = "[Sqlserverflexbeta - User]" - func NewUserDataSource() datasource.DataSource { return &userDataSource{} } diff --git a/stackit/internal/services/sqlserverflexbeta/user/resource.go b/stackit/internal/services/sqlserverflexbeta/user/resource.go index f960c726..efaf3fc1 100644 --- a/stackit/internal/services/sqlserverflexbeta/user/resource.go +++ b/stackit/internal/services/sqlserverflexbeta/user/resource.go @@ -20,7 +20,6 @@ import ( "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" - sqlserverflexbetagen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/user/resources_gen" sqlserverflexbetaUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/utils" sqlserverflexbetaWait "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/wait/sqlserverflexbeta" @@ -59,7 +58,7 @@ type userResource struct { providerData core.ProviderData } -func (r *userResource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) { +func (r *userResource) Metadata(_ context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) { resp.TypeName = req.ProviderTypeName + "_sqlserverflexbeta_user" } @@ -108,23 +107,23 @@ func (r *userResource) ModifyPlan( } //// TODO: verify if this is needed - START - //var planRoles []string - //diags := planModel.Roles.ElementsAs(ctx, &planRoles, false) - //resp.Diagnostics.Append(diags...) - //if diags.HasError() { + // var planRoles []string + // diags := planModel.Roles.ElementsAs(ctx, &planRoles, false) + // resp.Diagnostics.Append(diags...) + // if diags.HasError() { // return //} - //slices.Sort(planRoles) - //var roles []attr.Value - //for _, role := range planRoles { + // slices.Sort(planRoles) + // var roles []attr.Value + // for _, role := range planRoles { // roles = append(roles, types.StringValue(string(role))) //} - //rolesSet, diags := types.ListValue(types.StringType, roles) - //resp.Diagnostics.Append(diags...) - //if diags.HasError() { + // rolesSet, diags := types.ListValue(types.StringType, roles) + // resp.Diagnostics.Append(diags...) + // if diags.HasError() { // return //} - //planModel.Roles = rolesSet + // planModel.Roles = rolesSet //// TODO: verify if this is needed - END resp.Diagnostics.Append(resp.Plan.Set(ctx, planModel)...) @@ -138,7 +137,7 @@ var modifiersFileByte []byte // Schema defines the schema for the resource. func (r *userResource) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) { - s := sqlserverflexbetagen.UserResourceSchema(ctx) + s := sqlserverflexbetaResGen.UserResourceSchema(ctx) fields, err := utils.ReadModifiersConfig(modifiersFileByte) if err != nil { @@ -435,7 +434,12 @@ func (r *userResource) Update( resp *resource.UpdateResponse, ) { // nolint:gocritic // function signature required by Terraform // Update shouldn't be called - core.LogAndAddError(ctx, &resp.Diagnostics, "Error updating user", "an SQL server user can not be updated, only created") + core.LogAndAddError( + ctx, + &resp.Diagnostics, + "Error updating user", + "an SQL server user can not be updated, only created", + ) } // Delete deletes the resource and removes the Terraform state on success. @@ -489,7 +493,6 @@ func (r *userResource) Delete( // Delete existing record set _, err = sqlserverflexbetaWait.DeleteUserWaitHandler(ctx, r.client, projectId, region, instanceId, userId). WaitWithContext(ctx) - // err := r.client.DeleteUserRequest(ctx, arg.projectId, arg.region, arg.instanceId, userId).Execute() if err != nil { core.LogAndAddError(ctx, &resp.Diagnostics, "User Delete Error", fmt.Sprintf("Calling API: %v", err)) return diff --git a/stackit/internal/wait/postgresflexalpha/wait.go b/stackit/internal/wait/postgresflexalpha/wait.go index 57106cec..4aea71e8 100644 --- a/stackit/internal/wait/postgresflexalpha/wait.go +++ b/stackit/internal/wait/postgresflexalpha/wait.go @@ -108,7 +108,7 @@ func CreateInstanceWaitHandler( ) if extendedTimeout < 3 { maxWait += time.Minute * 5 - extendedTimeout = extendedTimeout + 1 + extendedTimeout++ if *s.Network.AccessScope == "SNA" { ready := true if s.Network == nil || s.Network.InstanceAddress == nil { @@ -228,7 +228,7 @@ func GetUserByIdWaitHandler( if userId > math.MaxInt32 { return false, nil, fmt.Errorf("userId value is too big for int32") } - userId32 := int32(userId) + userId32 := int32(userId) //nolint:gosec // we need to convert databaseId to int32 because API expects int32 s, err := a.GetUserRequestExecute(ctx, projectId, region, instanceId, userId32) if err != nil { var oapiErr *oapierror.GenericOpenAPIError @@ -239,9 +239,11 @@ func GetUserByIdWaitHandler( switch oapiErr.StatusCode { case http.StatusBadGateway, http.StatusGatewayTimeout, http.StatusServiceUnavailable: case http.StatusNotFound: - tflog.Warn(ctx, "api responded with status", map[string]interface{}{ - "status": oapiErr.StatusCode, - }) + tflog.Warn( + ctx, "api responded with status", map[string]interface{}{ + "status": oapiErr.StatusCode, + }, + ) return false, nil, nil default: return false, nil, err @@ -262,7 +264,7 @@ func GetDatabaseByIdWaitHandler( ) *wait.AsyncActionHandler[postgresflex.GetDatabaseResponse] { handler := wait.New( func() (waitFinished bool, response *postgresflex.GetDatabaseResponse, err error) { - dbId32 := int32(databaseId) + dbId32 := int32(databaseId) //nolint:gosec // we need to convert databaseId to int32 because API expects int32 s, err := a.GetDatabaseRequestExecute(ctx, projectId, region, instanceId, dbId32) if err != nil { var oapiErr *oapierror.GenericOpenAPIError @@ -272,14 +274,18 @@ func GetDatabaseByIdWaitHandler( } switch oapiErr.StatusCode { case http.StatusBadGateway, http.StatusGatewayTimeout, http.StatusServiceUnavailable: - tflog.Warn(ctx, "api responded with 50[2,3,4] status", map[string]interface{}{ - "status": oapiErr.StatusCode, - }) + tflog.Warn( + ctx, "api responded with 50[2,3,4] status", map[string]interface{}{ + "status": oapiErr.StatusCode, + }, + ) return false, nil, nil case http.StatusNotFound: - tflog.Warn(ctx, "api responded with 404 status", map[string]interface{}{ - "status": oapiErr.StatusCode, - }) + tflog.Warn( + ctx, "api responded with 404 status", map[string]interface{}{ + "status": oapiErr.StatusCode, + }, + ) return false, nil, nil default: return false, nil, err diff --git a/stackit/internal/wait/sqlserverflexalpha/wait.go b/stackit/internal/wait/sqlserverflexalpha/wait.go index 05fa5eb4..712347d1 100644 --- a/stackit/internal/wait/sqlserverflexalpha/wait.go +++ b/stackit/internal/wait/sqlserverflexalpha/wait.go @@ -54,7 +54,12 @@ type APIClientInterface interface { instanceId string, ) (*sqlserverflex.ListRolesResponse, error) - ListUsersRequest(ctx context.Context, projectId string, region string, instanceId string) sqlserverflex.ApiListUsersRequestRequest + ListUsersRequest( + ctx context.Context, + projectId string, + region string, + instanceId string, + ) sqlserverflex.ApiListUsersRequestRequest ListUsersRequestExecute( ctx context.Context, @@ -256,7 +261,10 @@ func CreateDatabaseWaitHandler( var oapiErr *oapierror.GenericOpenAPIError ok := errors.As(err, &oapiErr) if !ok { - return false, nil, fmt.Errorf("get database - could not convert error to oapierror.GenericOpenAPIError: %s", err.Error()) + return false, nil, fmt.Errorf( + "get database - could not convert error to oapierror.GenericOpenAPIError: %s", + err.Error(), + ) } if oapiErr.StatusCode != http.StatusNotFound { return false, nil, err @@ -318,7 +326,10 @@ func WaitForUserWaitHandler( var oapiErr *oapierror.GenericOpenAPIError ok := errors.As(err, &oapiErr) if !ok { - return false, nil, fmt.Errorf("Wait (list users) could not convert error to oapierror.GenericOpenAPIError: %s", err.Error()) + return false, nil, fmt.Errorf( + "wait (list users) could not convert error to oapierror.GenericOpenAPIError: %s", + err.Error(), + ) } if oapiErr.StatusCode != http.StatusNotFound { return false, nil, err diff --git a/stackit/internal/wait/sqlserverflexalpha/wait_test.go b/stackit/internal/wait/sqlserverflexalpha/wait_test.go index 4c85e436..ca84ad1e 100644 --- a/stackit/internal/wait/sqlserverflexalpha/wait_test.go +++ b/stackit/internal/wait/sqlserverflexalpha/wait_test.go @@ -116,7 +116,6 @@ func (a *apiClientInstanceMocked) GetInstanceRequestExecute( }, nil } func TestCreateInstanceWaitHandler(t *testing.T) { - //stateSuccess := utils.Ptr(InstanceStateSuccess) instanceId := utils.Ptr("foo") tests := []struct { desc string @@ -160,7 +159,7 @@ func TestCreateInstanceWaitHandler(t *testing.T) { // Storage: nil, // Version: nil, // }, - //}, + // }, { desc: "create_failed", instanceId: *instanceId, diff --git a/stackit/internal/wait/sqlserverflexbeta/wait.go b/stackit/internal/wait/sqlserverflexbeta/wait.go index 41bfa2c1..2660cac5 100644 --- a/stackit/internal/wait/sqlserverflexbeta/wait.go +++ b/stackit/internal/wait/sqlserverflexbeta/wait.go @@ -54,7 +54,12 @@ type APIClientInterface interface { instanceId string, ) (*sqlserverflex.ListRolesResponse, error) - ListUsersRequest(ctx context.Context, projectId string, region string, instanceId string) sqlserverflex.ApiListUsersRequestRequest + ListUsersRequest( + ctx context.Context, + projectId string, + region string, + instanceId string, + ) sqlserverflex.ApiListUsersRequestRequest ListUsersRequestExecute( ctx context.Context, @@ -162,9 +167,17 @@ func CreateInstanceWaitHandler( } return true, s, nil case strings.ToLower(InstanceStateUnknown): - return true, nil, fmt.Errorf("create failed for instance %s with status %s", instanceId, InstanceStateUnknown) + return true, nil, fmt.Errorf( + "create failed for instance %s with status %s", + instanceId, + InstanceStateUnknown, + ) case strings.ToLower(InstanceStateFailed): - return true, nil, fmt.Errorf("create failed for instance %s with status %s", instanceId, InstanceStateFailed) + return true, nil, fmt.Errorf( + "create failed for instance %s with status %s", + instanceId, + InstanceStateFailed, + ) case strings.ToLower(InstanceStatePending), strings.ToLower(InstanceStateProcessing): tflog.Info( ctx, "request is being handled", map[string]interface{}{ @@ -268,7 +281,10 @@ func CreateDatabaseWaitHandler( var oapiErr *oapierror.GenericOpenAPIError ok := errors.As(err, &oapiErr) if !ok { - return false, nil, fmt.Errorf("get database - could not convert error to oapierror.GenericOpenAPIError: %s", err.Error()) + return false, nil, fmt.Errorf( + "get database - could not convert error to oapierror.GenericOpenAPIError: %s", + err.Error(), + ) } if oapiErr.StatusCode != http.StatusNotFound { return false, nil, err @@ -330,7 +346,10 @@ func WaitForUserWaitHandler( var oapiErr *oapierror.GenericOpenAPIError ok := errors.As(err, &oapiErr) if !ok { - return false, nil, fmt.Errorf("Wait (list users) could not convert error to oapierror.GenericOpenAPIError: %s", err.Error()) + return false, nil, fmt.Errorf( + "wait (list users) could not convert error to oapierror.GenericOpenAPIError: %s", + err.Error(), + ) } if oapiErr.StatusCode != http.StatusNotFound { return false, nil, err diff --git a/stackit/internal/wait/sqlserverflexbeta/wait_test.go b/stackit/internal/wait/sqlserverflexbeta/wait_test.go index 825761ce..0d10abae 100644 --- a/stackit/internal/wait/sqlserverflexbeta/wait_test.go +++ b/stackit/internal/wait/sqlserverflexbeta/wait_test.go @@ -116,7 +116,6 @@ func (a *apiClientInstanceMocked) GetInstanceRequestExecute( }, nil } func TestCreateInstanceWaitHandler(t *testing.T) { - //stateSuccess := utils.Ptr(InstanceStateSuccess) instanceId := utils.Ptr("foo") tests := []struct { desc string @@ -160,7 +159,7 @@ func TestCreateInstanceWaitHandler(t *testing.T) { // Storage: nil, // Version: nil, // }, - //}, + // }, { desc: "create_failed", instanceId: *instanceId, diff --git a/stackit/provider.go b/stackit/provider.go index bb9ffad0..086ae003 100644 --- a/stackit/provider.go +++ b/stackit/provider.go @@ -46,6 +46,7 @@ var ( _ provider.Provider = &Provider{} ) +//nolint:unused // These constants are defined for future use in retry logic for HTTP requests, which is not yet implemented. const ( // maxRetries is the maximum number of retries for a failed HTTP request. maxRetries = 3 @@ -123,6 +124,7 @@ type providerModel struct { // Schema defines the provider-level schema for configuration data. func (p *Provider) Schema(_ context.Context, _ provider.SchemaRequest, resp *provider.SchemaResponse) { + //nolint:gosec // These are just descriptions, not actual credentials or sensitive information. descriptions := map[string]string{ "credentials_path": "Path of JSON from where the credentials are read. Takes precedence over the env var `STACKIT_CREDENTIALS_PATH`. Default value is `~/.stackit/credentials.json`.", "service_account_token": "Token used for authentication. If set, the token flow will be used to authenticate all operations.", @@ -489,7 +491,8 @@ func (p *Provider) Configure(ctx context.Context, req provider.ConfigureRequest, return } - //roundTripper := core.NewRetryRoundTripper( + //nolint:gocritic // maybe later in the code + // roundTripper := core.NewRetryRoundTripper( // baseRoundTripper, // maxRetries, // initialDelay, diff --git a/stackit/provider_acc_test.go b/stackit/provider_acc_test.go index 11cdf672..2230d731 100644 --- a/stackit/provider_acc_test.go +++ b/stackit/provider_acc_test.go @@ -12,11 +12,12 @@ import ( "time" "github.com/golang-jwt/jwt/v5" - test "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/google/go-cmp/cmp" + test "github.com/hashicorp/terraform-plugin-testing/helper/resource" //nolint:staticcheck // used for acceptance testing "github.com/jarcoal/httpmock" "github.com/stackitcloud/stackit-sdk-go/core/clients" "github.com/stackitcloud/stackit-sdk-go/core/utils" - "github.com/stretchr/testify/require" + "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha" postgresFlexAlphaFlavor "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/flavor" @@ -40,7 +41,7 @@ import ( "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/internal/testutils" "github.com/hashicorp/terraform-plugin-testing/config" - "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" //nolint:staticcheck // used for acceptance testing ) //go:embed testdata/provider-credentials.tf @@ -67,12 +68,15 @@ func TestMshTest(t *testing.T) { testutils.ActivateEnvironmentHttpMocks() - httpmock.RegisterResponder("POST", `https://service-account.api.stackit.cloud/token`, - func(req *http.Request) (*http.Response, error) { - token := jwt.NewWithClaims(jwt.SigningMethodHS256, jwt.MapClaims{ - "foo": "bar", - "nbf": time.Date(2015, 10, 10, 12, 0, 0, 0, time.UTC).Unix(), - }) + httpmock.RegisterResponder( + "POST", `https://service-account.api.stackit.cloud/token`, + func(_ *http.Request) (*http.Response, error) { + token := jwt.NewWithClaims( + jwt.SigningMethodHS256, jwt.MapClaims{ + "foo": "bar", + "nbf": time.Date(2015, 10, 10, 12, 0, 0, 0, time.UTC).Unix(), + }, + ) // Sign and get the complete encoded token as a string using the secret tokenString, err := token.SignedString([]byte("mySecret")) if err != nil { @@ -88,10 +92,13 @@ func TestMshTest(t *testing.T) { } return httpmock.NewJsonResponse(http.StatusOK, tR) - }) + }, + ) - httpmock.RegisterResponder("GET", `https://postgres-flex-service.api.eu01.stackit.cloud/v3alpha1/projects/xyz-project-id/regions/eu01/flavors?page=1&size=25&sort=id.asc`, - func(req *http.Request) (*http.Response, error) { + httpmock.RegisterResponder( + "GET", + `https://postgres-flex-service.api.eu01.stackit.cloud/v3alpha1/projects/xyz-project-id/regions/eu01/flavors?page=1&size=25&sort=id.asc`, + func(_ *http.Request) (*http.Response, error) { res := postgresflexalpha.GetFlavorsResponse{ Flavors: &[]postgresflexalpha.ListFlavors{ { @@ -120,15 +127,17 @@ func TestMshTest(t *testing.T) { }, ) - test.Test(t, test.TestCase{ - IsUnitTest: true, - ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories, - Steps: []test.TestStep{ - { - ConfigVariables: map[string]config.Variable{ - "project_id": config.StringVariable("xyz-project-id"), - }, - Config: fmt.Sprintf(` + test.Test( + t, test.TestCase{ + IsUnitTest: true, + ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories, + Steps: []test.TestStep{ + { + ConfigVariables: map[string]config.Variable{ + "project_id": config.StringVariable("xyz-project-id"), + }, + Config: fmt.Sprintf( + ` provider "stackitprivatepreview" { default_region = "%[1]s" service_account_key_path = "%[2]s" @@ -144,12 +153,13 @@ func TestMshTest(t *testing.T) { node_type = "Single" storage_class = "premium-perf2-stackit" }`, - os.Getenv("TF_ACC_REGION"), - os.Getenv("TF_ACC_SERVICE_ACCOUNT_FILE"), - ), + os.Getenv("TF_ACC_REGION"), + os.Getenv("TF_ACC_SERVICE_ACCOUNT_FILE"), + ), + }, }, }, - }) + ) } func TestUnitProviderHasChildDataSources_Basic(t *testing.T) { @@ -171,12 +181,24 @@ func TestUnitProviderHasChildDataSources_Basic(t *testing.T) { sqlserverFlexBetaUser.NewUserDataSource(), sqlserverFlexBetaFlavor.NewFlavorDataSource(), } - datasources := stackit.New("testing")().(*stackit.Provider).DataSources(context.Background()) + provider, ok := stackit.New("testing")().(*stackit.Provider) + if !ok { + t.Fatal("could not assert provider type") + } + datasources := provider.DataSources(context.Background()) - if !reflect.DeepEqual(len(expectedDataSources), len(datasources)) { - for _, d := range datasources { - require.Containsf(t, expectedDataSources, d(), "Data source %+v was not expected", reflect.TypeOf(d())) - } + expectedMap := map[string]struct{}{} + for _, d := range expectedDataSources { + expectedMap[reflect.TypeOf(d).String()] = struct{}{} + } + + actualMap := map[string]struct{}{} + for _, d := range datasources { + actualMap[reflect.TypeOf(d()).String()] = struct{}{} + } + + if diff := cmp.Diff(expectedMap, actualMap); diff != "" { + t.Errorf("DataSources mismatch (-expected +actual):\n%s", diff) } } @@ -194,12 +216,24 @@ func TestUnitProviderHasChildResources_Basic(t *testing.T) { sqlserverFlexBetaUser.NewUserResource(), sqlserverflexBetaDatabase.NewDatabaseResource(), } - resources := stackit.New("testing")().(*stackit.Provider).Resources(context.Background()) + provider, ok := stackit.New("testing")().(*stackit.Provider) + if !ok { + t.Fatal("could not assert provider type") + } + resources := provider.Resources(context.Background()) - if !reflect.DeepEqual(len(expectedResources), len(resources)) { - for _, d := range resources { - require.Containsf(t, expectedResources, d(), "Resource %+v was not expected", reflect.TypeOf(d())) - } + expectedMap := map[string]struct{}{} + for _, r := range expectedResources { + expectedMap[reflect.TypeOf(r).String()] = struct{}{} + } + + actualMap := map[string]struct{}{} + for _, r := range resources { + actualMap[reflect.TypeOf(r()).String()] = struct{}{} + } + + if diff := cmp.Diff(expectedMap, actualMap); diff != "" { + t.Errorf("Resources mismatch (-expected +actual):\n%s", diff) } } @@ -209,23 +243,25 @@ func TestAccEnvVarServiceAccountPathValid(t *testing.T) { if v := os.Getenv(resource.EnvTfAcc); v == "" { t.Skipf( "Acceptance tests skipped unless env '%s' set", - resource.EnvTfAcc) + resource.EnvTfAcc, + ) return } - // t.Setenv("STACKIT_CREDENTIALS_PATH", "") tempHomeFolder := testutils.CreateTemporaryHome(true, t) defer testutils.CleanupTemporaryHome(tempHomeFolder, t) - resource.Test(t, resource.TestCase{ - ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories, - Steps: []resource.TestStep{ - { - PreConfig: func() { testutils.SetTemporaryHome(tempHomeFolder) }, - ConfigVariables: testConfigProviderCredentials, - Config: providerCredentialConfig, + resource.Test( + t, resource.TestCase{ + ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories, + Steps: []resource.TestStep{ + { + PreConfig: func() { testutils.SetTemporaryHome(tempHomeFolder) }, + ConfigVariables: testConfigProviderCredentials, + Config: providerCredentialConfig, + }, }, }, - }) + ) } func TestAccEnvVarServiceAccountPathInvalid(t *testing.T) { @@ -233,17 +269,19 @@ func TestAccEnvVarServiceAccountPathInvalid(t *testing.T) { t.Setenv("STACKIT_CREDENTIALS_PATH", "") tempHomeFolder := testutils.CreateTemporaryHome(false, t) defer testutils.CleanupTemporaryHome(tempHomeFolder, t) - resource.Test(t, resource.TestCase{ - ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories, - Steps: []resource.TestStep{ - { - PreConfig: func() { testutils.SetTemporaryHome(tempHomeFolder) }, - ConfigVariables: testConfigProviderCredentials, - Config: providerCredentialConfig, - ExpectError: regexp.MustCompile(`undefined response type, status code 401`), + resource.Test( + t, resource.TestCase{ + ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories, + Steps: []resource.TestStep{ + { + PreConfig: func() { testutils.SetTemporaryHome(tempHomeFolder) }, + ConfigVariables: testConfigProviderCredentials, + Config: providerCredentialConfig, + ExpectError: regexp.MustCompile(`undefined response type, status code 401`), + }, }, }, - }) + ) } func TestAccCredentialsFileValid(t *testing.T) { @@ -251,16 +289,18 @@ func TestAccCredentialsFileValid(t *testing.T) { t.Setenv("STACKIT_CREDENTIALS_PATH", "") tempHomeFolder := testutils.CreateTemporaryHome(true, t) defer testutils.CleanupTemporaryHome(tempHomeFolder, t) - resource.Test(t, resource.TestCase{ - ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories, - Steps: []resource.TestStep{ - { - PreConfig: func() { testutils.SetTemporaryHome(tempHomeFolder) }, - ConfigVariables: testConfigProviderCredentials, - Config: providerCredentialConfig, + resource.Test( + t, resource.TestCase{ + ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories, + Steps: []resource.TestStep{ + { + PreConfig: func() { testutils.SetTemporaryHome(tempHomeFolder) }, + ConfigVariables: testConfigProviderCredentials, + Config: providerCredentialConfig, + }, }, }, - }) + ) } func TestAccCredentialsFileInvalid(t *testing.T) { @@ -268,17 +308,19 @@ func TestAccCredentialsFileInvalid(t *testing.T) { t.Setenv("STACKIT_CREDENTIALS_PATH", "") tempHomeFolder := testutils.CreateTemporaryHome(false, t) defer testutils.CleanupTemporaryHome(tempHomeFolder, t) - resource.Test(t, resource.TestCase{ - ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories, - Steps: []resource.TestStep{ - { - PreConfig: func() { testutils.SetTemporaryHome(tempHomeFolder) }, - ConfigVariables: testConfigProviderCredentials, - Config: providerCredentialConfig, - ExpectError: regexp.MustCompile(`Jwt is not in(\r\n|\r|\n)the form of Header.Payload.Signature`), + resource.Test( + t, resource.TestCase{ + ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories, + Steps: []resource.TestStep{ + { + PreConfig: func() { testutils.SetTemporaryHome(tempHomeFolder) }, + ConfigVariables: testConfigProviderCredentials, + Config: providerCredentialConfig, + ExpectError: regexp.MustCompile(`Jwt is not in(\r\n|\r|\n)the form of Header.Payload.Signature`), + }, }, }, - }) + ) } func TestAccProviderConfigureValidValues(t *testing.T) { @@ -287,21 +329,25 @@ func TestAccProviderConfigureValidValues(t *testing.T) { if v := os.Getenv(resource.EnvTfAcc); v == "" { t.Skipf( "Acceptance tests skipped unless env '%s' set", - resource.EnvTfAcc) + resource.EnvTfAcc, + ) return } t.Setenv("STACKIT_CREDENTIALS_PATH", "") tempHomeFolder := testutils.CreateTemporaryHome(true, t) defer testutils.CleanupTemporaryHome(tempHomeFolder, t) - resource.Test(t, resource.TestCase{ - ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories, - Steps: []resource.TestStep{ - { // valid provider attributes - ConfigVariables: testConfigProviderCredentials, - Config: providerValidAttributes, + resource.Test( + t, resource.TestCase{ + ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories, + Steps: []resource.TestStep{ + { + // valid provider attributes + ConfigVariables: testConfigProviderCredentials, + Config: providerValidAttributes, + }, }, }, - }) + ) } func TestAccProviderConfigureAnInvalidValue(t *testing.T) { @@ -310,21 +356,25 @@ func TestAccProviderConfigureAnInvalidValue(t *testing.T) { if v := os.Getenv(resource.EnvTfAcc); v == "" { t.Skipf( "Acceptance tests skipped unless env '%s' set", - resource.EnvTfAcc) + resource.EnvTfAcc, + ) return } t.Setenv("STACKIT_CREDENTIALS_PATH", "") tempHomeFolder := testutils.CreateTemporaryHome(true, t) defer testutils.CleanupTemporaryHome(tempHomeFolder, t) - resource.Test(t, resource.TestCase{ - ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories, - Steps: []resource.TestStep{ - { // invalid test attribute should throw an error - ConfigVariables: testConfigProviderCredentials, - Config: providerInvalidAttribute, - ExpectError: regexp.MustCompile(`An argument named "test" is not expected here\.`), + resource.Test( + t, resource.TestCase{ + ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories, + Steps: []resource.TestStep{ + { + // invalid test attribute should throw an error + ConfigVariables: testConfigProviderCredentials, + Config: providerInvalidAttribute, + ExpectError: regexp.MustCompile(`An argument named "test" is not expected here\.`), + }, }, }, - }) + ) } diff --git a/tools/tools.go b/tools/tools.go index 075c26d5..e9567c7f 100644 --- a/tools/tools.go +++ b/tools/tools.go @@ -1,3 +1,5 @@ +//go:build tools + package tools // Format Terraform code for use in documentation. @@ -7,3 +9,11 @@ package tools // Generate documentation. //go:generate go run github.com/hashicorp/terraform-plugin-docs/cmd/tfplugindocs generate --provider-dir .. -provider-name stackitprivatepreview + +import ( + _ "github.com/golangci/golangci-lint/v2/cmd/golangci-lint" + _ "github.com/hashicorp/terraform-plugin-codegen-framework/cmd/tfplugingen-framework" + _ "github.com/hashicorp/terraform-plugin-codegen-openapi/cmd/tfplugingen-openapi" + _ "github.com/hashicorp/terraform-plugin-docs/cmd/tfplugindocs" + _ "golang.org/x/tools/cmd/goimports" +) From eb13630d2f2058ef92a2bf5457909cb64f3940c7 Mon Sep 17 00:00:00 2001 From: "marcel.henselin" Date: Fri, 27 Feb 2026 10:08:09 +0000 Subject: [PATCH 39/41] feat: test STACKIT runner (#78) Signed-off-by: marcel.henselin ## Description relates to #1234 ## Checklist - [ ] Issue was linked above - [ ] Code format was applied: `make fmt` - [ ] Examples were added / adjusted (see `examples/` directory) - [x] Docs are up-to-date: `make generate-docs` (will be checked by CI) - [ ] Unit tests got implemented or updated - [ ] Acceptance tests got implemented or updated (see e.g. [here](https://github.com/stackitcloud/terraform-provider-stackit/blob/f5f99d170996b208672ae684b6da53420e369563/stackit/internal/services/dns/dns_acc_test.go)) - [x] Unit tests are passing: `make test` (will be checked by CI) - [x] No linter issues: `make lint` (will be checked by CI) Reviewed-on: https://tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pulls/78 Co-authored-by: marcel.henselin Co-committed-by: marcel.henselin --- .github/workflows/ci.yaml | 33 +++++++++++++++++++++++++++++++++ 1 file changed, 33 insertions(+) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index f229dcb4..e747e730 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -22,6 +22,39 @@ env: CODE_COVERAGE_ARTIFACT_NAME: "code-coverage" jobs: + runner_test: + name: "Test STACKIT runner" + runs-on: stackit-docker + steps: + - name: Install needed tools + run: | + apt-get -y -qq update + apt-get -y -qq install jq python3 python3-pip python-is-python3 s3cmd git make wget + + - name: Setup Go + uses: actions/setup-go@v6 + with: + go-version: ${{ env.GO_VERSION }} + + - name: Install go tools + run: | + go install golang.org/x/tools/cmd/goimports@latest + go install github.com/hashicorp/terraform-plugin-codegen-framework/cmd/tfplugingen-framework@latest + go install github.com/hashicorp/terraform-plugin-codegen-openapi/cmd/tfplugingen-openapi@latest + + - name: Setup JAVA + uses: actions/setup-java@v5 + with: + distribution: 'temurin' # See 'Supported distributions' for available options + java-version: '21' + + - name: Checkout + uses: actions/checkout@v6 + + - name: Run build pkg directory + run: | + go run cmd/main.go build + publish_test: name: "Test readiness for publishing provider" needs: config From 07458c5677e0e9809d9aac2932547a877c3066dc Mon Sep 17 00:00:00 2001 From: "marcel.henselin" Date: Fri, 27 Feb 2026 10:20:02 +0000 Subject: [PATCH 40/41] feat: add runner stats (#79) Signed-off-by: marcel.henselin ## Description relates to #1234 ## Checklist - [ ] Issue was linked above - [ ] Code format was applied: `make fmt` - [ ] Examples were added / adjusted (see `examples/` directory) - [x] Docs are up-to-date: `make generate-docs` (will be checked by CI) - [ ] Unit tests got implemented or updated - [ ] Acceptance tests got implemented or updated (see e.g. [here](https://github.com/stackitcloud/terraform-provider-stackit/blob/f5f99d170996b208672ae684b6da53420e369563/stackit/internal/services/dns/dns_acc_test.go)) - [x] Unit tests are passing: `make test` (will be checked by CI) - [x] No linter issues: `make lint` (will be checked by CI) Reviewed-on: https://tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pulls/79 Co-authored-by: marcel.henselin Co-committed-by: marcel.henselin --- .github/workflows/runnerstats.yaml | 29 +++++++++++++++++++++++++++++ 1 file changed, 29 insertions(+) create mode 100644 .github/workflows/runnerstats.yaml diff --git a/.github/workflows/runnerstats.yaml b/.github/workflows/runnerstats.yaml new file mode 100644 index 00000000..330e6e0f --- /dev/null +++ b/.github/workflows/runnerstats.yaml @@ -0,0 +1,29 @@ +name: Runner stats + +on: + workflow_dispatch: + +jobs: + stats-own: + name: "Get own runner stats" + runs-on: ubuntu-latest + steps: + - name: Install needed tools + run: | + apt-get -y -qq update + apt-get -y -qq install inxi + + - name: Show stats + run: inxi + + stats-stackit: + name: "Get STACKIT runner stats" + runs-on: stackit-docker + steps: + - name: Install needed tools + run: | + apt-get -y -qq update + apt-get -y -qq install inxi + + - name: Show stats + run: inxi From 635a9abf205c28890dd270a4b6312eb4be13da3e Mon Sep 17 00:00:00 2001 From: "marcel.henselin" Date: Fri, 27 Feb 2026 10:25:10 +0000 Subject: [PATCH 41/41] fix: disable shell color in runnerstats (#80) Signed-off-by: marcel.henselin ## Description relates to #1234 ## Checklist - [ ] Issue was linked above - [ ] Code format was applied: `make fmt` - [ ] Examples were added / adjusted (see `examples/` directory) - [x] Docs are up-to-date: `make generate-docs` (will be checked by CI) - [ ] Unit tests got implemented or updated - [ ] Acceptance tests got implemented or updated (see e.g. [here](https://github.com/stackitcloud/terraform-provider-stackit/blob/f5f99d170996b208672ae684b6da53420e369563/stackit/internal/services/dns/dns_acc_test.go)) - [x] Unit tests are passing: `make test` (will be checked by CI) - [x] No linter issues: `make lint` (will be checked by CI) Reviewed-on: https://tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pulls/80 Co-authored-by: marcel.henselin Co-committed-by: marcel.henselin --- .github/workflows/runnerstats.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/runnerstats.yaml b/.github/workflows/runnerstats.yaml index 330e6e0f..08190d4c 100644 --- a/.github/workflows/runnerstats.yaml +++ b/.github/workflows/runnerstats.yaml @@ -14,7 +14,7 @@ jobs: apt-get -y -qq install inxi - name: Show stats - run: inxi + run: inxi -c 0 stats-stackit: name: "Get STACKIT runner stats" @@ -26,4 +26,4 @@ jobs: apt-get -y -qq install inxi - name: Show stats - run: inxi + run: inxi -c 0