diff --git a/cmd/cmd/build/build.go b/cmd/cmd/build/build.go
index f210b8d2..30fbf552 100644
--- a/cmd/cmd/build/build.go
+++ b/cmd/cmd/build/build.go
@@ -1,6 +1,7 @@
package build
import (
+ "bufio"
"bytes"
"errors"
"fmt"
@@ -509,7 +510,7 @@ func generateServiceFiles(rootDir, generatorDir string) error {
oasFile := path.Join(generatorDir, "oas", fmt.Sprintf("%s%s.json", service.Name(), svcVersion.Name()))
if _, oasErr := os.Stat(oasFile); os.IsNotExist(oasErr) {
- slog.Warn(" coulc not find matching oas", "svc", service.Name(), "version", svcVersion.Name())
+ slog.Warn(" could not find matching oas", "svc", service.Name(), "version", svcVersion.Name())
continue
}
@@ -648,6 +649,15 @@ func generateServiceFiles(rootDir, generatorDir string) error {
return err
}
}
+
+ tfAnoErr := handleTfTagForDatasourceFile(
+ path.Join(tgtFolder, fmt.Sprintf("%s_data_source_gen.go", resource)),
+ scName,
+ resource,
+ )
+ if tfAnoErr != nil {
+ return tfAnoErr
+ }
}
}
}
@@ -655,6 +665,70 @@ func generateServiceFiles(rootDir, generatorDir string) error {
return nil
}
+// handleTfTagForDatasourceFile replaces existing "id" with "tf_original_api_id"
+func handleTfTagForDatasourceFile(filePath, service, resource string) error {
+ slog.Info(" handle terraform tag for datasource", "service", service, "resource", resource)
+ if !fileExists(filePath) {
+ slog.Warn(" could not find file, skipping", "path", filePath)
+ return nil
+ }
+ f, err := os.Open(filePath)
+ if err != nil {
+ return err
+ }
+ defer f.Close()
+
+ tmp, err := os.CreateTemp(path.Dir(filePath), "replace-*")
+ if err != nil {
+ return err
+ }
+ defer tmp.Close()
+
+ sc := bufio.NewScanner(f)
+ for sc.Scan() {
+ resLine, err := handleLine(sc.Text())
+ if err != nil {
+ return err
+ }
+ if _, err := io.WriteString(tmp, resLine+"\n"); err != nil {
+ return err
+ }
+ }
+ if scErr := sc.Err(); scErr != nil {
+ return scErr
+ }
+
+ if err := tmp.Close(); err != nil {
+ return err
+ }
+
+ if err := f.Close(); err != nil {
+ return err
+ }
+
+ if err := os.Rename(tmp.Name(), filePath); err != nil {
+ return fmt.Errorf("renaming temp file: %w", err)
+ }
+ return nil
+}
+
+func handleLine(line string) (string, error) {
+ schemaRegex := regexp.MustCompile(`(\s+")(id)(": schema.[a-zA-Z0-9]+Attribute{)`)
+
+ schemaMatches := schemaRegex.FindAllStringSubmatch(line, -1)
+ if schemaMatches != nil {
+ return fmt.Sprintf("%stf_original_api_id%s", schemaMatches[0][1], schemaMatches[0][3]), nil
+ }
+
+ modelRegex := regexp.MustCompile(`(\s+Id\s+types.[a-zA-Z0-9]+\s+.tfsdk:")(id)(".)`)
+ modelMatches := modelRegex.FindAllStringSubmatch(line, -1)
+ if modelMatches != nil {
+ return fmt.Sprintf("%stf_original_api_id%s", modelMatches[0][1], modelMatches[0][3]), nil
+ }
+
+ return line, nil
+}
+
func checkCommands(commands []string) error {
for _, commandName := range commands {
if !commandExists(commandName) {
diff --git a/docs/data-sources/postgresflexalpha_flavors.md b/docs/data-sources/postgresflexalpha_flavors.md
index f90ae257..06645bb4 100644
--- a/docs/data-sources/postgresflexalpha_flavors.md
+++ b/docs/data-sources/postgresflexalpha_flavors.md
@@ -38,12 +38,12 @@ Read-Only:
- `cpu` (Number) The cpu count of the instance.
- `description` (String) The flavor description.
-- `id` (String) The id of the instance flavor.
- `max_gb` (Number) maximum storage which can be ordered for the flavor in Gigabyte.
- `memory` (Number) The memory of the instance in Gibibyte.
- `min_gb` (Number) minimum storage which is required to order in Gigabyte.
- `node_type` (String) defines the nodeType it can be either single or replica
- `storage_classes` (Attributes List) maximum storage which can be ordered for the flavor in Gigabyte. (see [below for nested schema](#nestedatt--flavors--storage_classes))
+- `tf_original_api_id` (String) The id of the instance flavor.
### Nested Schema for `flavors.storage_classes`
diff --git a/docs/data-sources/postgresflexalpha_instance.md b/docs/data-sources/postgresflexalpha_instance.md
index 54d887ea..466745a6 100644
--- a/docs/data-sources/postgresflexalpha_instance.md
+++ b/docs/data-sources/postgresflexalpha_instance.md
@@ -37,7 +37,6 @@ data "stackitprivatepreview_postgresflexalpha_instance" "example" {
⚠ **Note:** This feature is in private preview. Supplying this object is only permitted for enabled accounts. If your account does not have access, the request will be rejected. (see [below for nested schema](#nestedatt--encryption))
- `flavor_id` (String) The id of the instance flavor.
-- `id` (String) The ID of the instance.
- `is_deletable` (Boolean) Whether the instance can be deleted or not.
- `name` (String) The name of the instance.
- `network` (Attributes) The access configuration of the instance (see [below for nested schema](#nestedatt--network))
@@ -45,6 +44,7 @@ data "stackitprivatepreview_postgresflexalpha_instance" "example" {
- `retention_days` (Number) How long backups are retained. The value can only be between 32 and 365 days.
- `status` (String) The current status of the instance.
- `storage` (Attributes) The object containing information about the storage size and class. (see [below for nested schema](#nestedatt--storage))
+- `tf_original_api_id` (String) The ID of the instance.
- `version` (String) The Postgres version used for the instance. See [Versions Endpoint](/documentation/postgres-flex-service/version/v3alpha1#tag/Version) for supported version parameters.
diff --git a/docs/data-sources/sqlserverflexalpha_database.md b/docs/data-sources/sqlserverflexalpha_database.md
index 4aab99cc..5db648f4 100644
--- a/docs/data-sources/sqlserverflexalpha_database.md
+++ b/docs/data-sources/sqlserverflexalpha_database.md
@@ -26,6 +26,6 @@ description: |-
- `collation_name` (String) The collation of the database. This database collation should match the *collation_name* of one of the collations given by the **Get database collation list** endpoint.
- `compatibility_level` (Number) CompatibilityLevel of the Database.
-- `id` (Number) The id of the database.
- `name` (String) The name of the database.
- `owner` (String) The owner of the database.
+- `tf_original_api_id` (Number) The id of the database.
diff --git a/docs/data-sources/sqlserverflexalpha_instance.md b/docs/data-sources/sqlserverflexalpha_instance.md
index 134eb567..b05d7b8e 100644
--- a/docs/data-sources/sqlserverflexalpha_instance.md
+++ b/docs/data-sources/sqlserverflexalpha_instance.md
@@ -34,7 +34,6 @@ data "stackitprivatepreview_sqlserverflexalpha_instance" "example" {
- `edition` (String) Edition of the MSSQL server instance
- `encryption` (Attributes) this defines which key to use for storage encryption (see [below for nested schema](#nestedatt--encryption))
- `flavor_id` (String) The id of the instance flavor.
-- `id` (String) The ID of the instance.
- `is_deletable` (Boolean) Whether the instance can be deleted or not.
- `name` (String) The name of the instance.
- `network` (Attributes) The access configuration of the instance (see [below for nested schema](#nestedatt--network))
@@ -42,6 +41,7 @@ data "stackitprivatepreview_sqlserverflexalpha_instance" "example" {
- `retention_days` (Number) The days for how long the backup files should be stored before cleaned up. 30 to 365
- `status` (String)
- `storage` (Attributes) The object containing information about the storage size and class. (see [below for nested schema](#nestedatt--storage))
+- `tf_original_api_id` (String) The ID of the instance.
- `version` (String) The sqlserver version used for the instance.
diff --git a/docs/data-sources/sqlserverflexbeta_instance.md b/docs/data-sources/sqlserverflexbeta_instance.md
index cc3645ef..431f95f1 100644
--- a/docs/data-sources/sqlserverflexbeta_instance.md
+++ b/docs/data-sources/sqlserverflexbeta_instance.md
@@ -34,7 +34,6 @@ data "stackitprivatepreview_sqlserverflexbeta_instance" "example" {
- `edition` (String) Edition of the MSSQL server instance
- `encryption` (Attributes) this defines which key to use for storage encryption (see [below for nested schema](#nestedatt--encryption))
- `flavor_id` (String) The id of the instance flavor.
-- `id` (String) The ID of the instance.
- `is_deletable` (Boolean) Whether the instance can be deleted or not.
- `name` (String) The name of the instance.
- `network` (Attributes) The access configuration of the instance (see [below for nested schema](#nestedatt--network))
@@ -42,6 +41,7 @@ data "stackitprivatepreview_sqlserverflexbeta_instance" "example" {
- `retention_days` (Number) The days for how long the backup files should be stored before cleaned up. 30 to 365
- `status` (String)
- `storage` (Attributes) The object containing information about the storage size and class. (see [below for nested schema](#nestedatt--storage))
+- `tf_original_api_id` (String) The ID of the instance.
- `version` (String) The sqlserver version used for the instance.
diff --git a/stackit/internal/services/postgresflexalpha/database/datasource.go b/stackit/internal/services/postgresflexalpha/database/datasource.go
index 36fc5333..4a89be17 100644
--- a/stackit/internal/services/postgresflexalpha/database/datasource.go
+++ b/stackit/internal/services/postgresflexalpha/database/datasource.go
@@ -5,18 +5,17 @@ import (
"fmt"
"net/http"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
- postgresflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/utils"
-
"github.com/hashicorp/terraform-plugin-framework/datasource"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
- "github.com/hashicorp/terraform-plugin-log/tflog"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/validate"
-
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
+ "github.com/hashicorp/terraform-plugin-framework/diag"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+ "github.com/hashicorp/terraform-plugin-log/tflog"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
+ postgresflexalpha2 "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/database/datasources_gen"
+ postgresflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/utils"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
+
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
)
@@ -30,6 +29,12 @@ func NewDatabaseDataSource() datasource.DataSource {
return &databaseDataSource{}
}
+// dataSourceModel maps the data source schema data.
+type dataSourceModel struct {
+ postgresflexalpha2.DatabaseModel
+ TerraformID types.String `tfsdk:"id"`
+}
+
// databaseDataSource is the data source implementation.
type databaseDataSource struct {
client *postgresflexalpha.APIClient
@@ -66,132 +71,46 @@ func (r *databaseDataSource) Configure(
}
// Schema defines the schema for the data source.
-func (r *databaseDataSource) Schema(_ context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) {
- descriptions := map[string]string{
- "main": "Postgres Flex database resource schema. Must have a `region` specified in the provider configuration.",
- "id": "Terraform's internal resource ID. It is structured as \"`project_id`,`region`,`instance_id`,`database_id`\".",
- "database_id": "Database ID.",
- "instance_id": "ID of the Postgres Flex instance.",
- "project_id": "STACKIT project ID to which the instance is associated.",
- "name": "Database name.",
- "owner": "Username of the database owner.",
- "region": "The resource region. If not defined, the provider region is used.",
+func (r *databaseDataSource) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) {
+
+ s := postgresflexalpha2.DatabaseDataSourceSchema(ctx)
+ s.Attributes["id"] = schema.StringAttribute{
+ Description: "Terraform's internal resource ID. It is structured as \"`project_id`,`region`,`instance_id`," +
+ "`database_id`\".",
+ Computed: true,
}
- resp.Schema = schema.Schema{
- Description: descriptions["main"],
- Attributes: map[string]schema.Attribute{
- "id": schema.StringAttribute{
- Description: descriptions["id"],
- Computed: true,
- },
- "database_id": schema.Int64Attribute{
- Description: descriptions["database_id"],
- Optional: true,
- Computed: true,
- },
- "instance_id": schema.StringAttribute{
- Description: descriptions["instance_id"],
- Required: true,
- Validators: []validator.String{
- validate.UUID(),
- validate.NoSeparator(),
- },
- },
- "project_id": schema.StringAttribute{
- Description: descriptions["project_id"],
- Required: true,
- Validators: []validator.String{
- validate.UUID(),
- validate.NoSeparator(),
- },
- },
- "name": schema.StringAttribute{
- Description: descriptions["name"],
- Optional: true,
- Computed: true,
- Validators: []validator.String{
- stringvalidator.LengthAtLeast(1),
- },
- },
- "owner": schema.StringAttribute{
- Description: descriptions["owner"],
- Computed: true,
- },
- "region": schema.StringAttribute{
- // the region cannot be found, so it has to be passed
- Optional: true,
- Description: descriptions["region"],
- },
- },
- }
+ resp.Schema = s
}
-// Read refreshes the Terraform state with the latest data.
+// Read fetches the data for the data source.
func (r *databaseDataSource) Read(
ctx context.Context,
req datasource.ReadRequest,
resp *datasource.ReadResponse,
) { // nolint:gocritic // function signature required by Terraform
- var model Model
+ var model dataSourceModel
diags := req.Config.Get(ctx, &model)
resp.Diagnostics.Append(diags...)
if resp.Diagnostics.HasError() {
return
}
- // validation for exactly one of database_id or name
- isIdSet := !model.DatabaseId.IsNull() && !model.DatabaseId.IsUnknown()
- isNameSet := !model.Name.IsNull() && !model.Name.IsUnknown()
-
- if (isIdSet && isNameSet) || (!isIdSet && !isNameSet) {
- core.LogAndAddError(
- ctx, &resp.Diagnostics,
- "Invalid configuration", "Exactly one of 'database_id' or 'name' must be specified.",
- )
- return
- }
-
ctx = core.InitProviderContext(ctx)
projectId := model.ProjectId.ValueString()
instanceId := model.InstanceId.ValueString()
- databaseId := model.DatabaseId.ValueInt64()
region := r.providerData.GetRegionWithOverride(model.Region)
ctx = tflog.SetField(ctx, "project_id", projectId)
ctx = tflog.SetField(ctx, "instance_id", instanceId)
- ctx = tflog.SetField(ctx, "database_id", databaseId)
ctx = tflog.SetField(ctx, "region", region)
- var databaseResp *postgresflexalpha.ListDatabase
- var err error
-
- if isIdSet {
- databaseId := model.DatabaseId.ValueInt64()
- ctx = tflog.SetField(ctx, "database_id", databaseId)
- databaseResp, err = getDatabaseById(ctx, r.client, projectId, region, instanceId, databaseId)
- } else {
- databaseName := model.Name.ValueString()
- ctx = tflog.SetField(ctx, "name", databaseName)
- databaseResp, err = getDatabaseByName(ctx, r.client, projectId, region, instanceId, databaseName)
+ databaseResp, err := r.getDatabaseByNameOrID(ctx, &model, projectId, region, instanceId, &resp.Diagnostics)
+ if resp.Diagnostics.HasError() {
+ return
}
-
if err != nil {
- utils.LogError(
- ctx,
- &resp.Diagnostics,
- err,
- "Reading database",
- fmt.Sprintf(
- "Database with ID %q or instance with ID %q does not exist in project %q.",
- databaseId,
- instanceId,
- projectId,
- ),
- map[int]string{
- http.StatusForbidden: fmt.Sprintf("Project with ID %q not found or forbidden access", projectId),
- },
- )
+ handleReadError(ctx, &resp.Diagnostics, err, projectId, instanceId)
resp.State.RemoveResource(ctx)
return
}
@@ -218,3 +137,60 @@ func (r *databaseDataSource) Read(
}
tflog.Info(ctx, "Postgres Flex database read")
}
+
+// getDatabaseByNameOrID retrieves a single database by ensuring either a unique ID or name is provided.
+func (r *databaseDataSource) getDatabaseByNameOrID(
+ ctx context.Context,
+ model *dataSourceModel,
+ projectId, region, instanceId string,
+ diags *diag.Diagnostics,
+) (*postgresflexalpha.ListDatabase, error) {
+ isIdSet := !model.DatabaseId.IsNull() && !model.DatabaseId.IsUnknown()
+ isNameSet := !model.Name.IsNull() && !model.Name.IsUnknown()
+
+ if (isIdSet && isNameSet) || (!isIdSet && !isNameSet) {
+ diags.AddError(
+ "Invalid configuration",
+ "Exactly one of 'database_id' or 'name' must be specified.",
+ )
+ return nil, nil
+ }
+
+ if isIdSet {
+ databaseId := model.DatabaseId.ValueInt64()
+ ctx = tflog.SetField(ctx, "database_id", databaseId)
+ return getDatabaseById(ctx, r.client, projectId, region, instanceId, databaseId)
+ }
+
+ databaseName := model.Name.ValueString()
+ ctx = tflog.SetField(ctx, "name", databaseName)
+ return getDatabaseByName(ctx, r.client, projectId, region, instanceId, databaseName)
+}
+
+// handleReadError centralizes API error handling for the Read operation.
+func handleReadError(ctx context.Context, diags *diag.Diagnostics, err error, projectId, instanceId string) {
+ utils.LogError(
+ ctx,
+ diags,
+ err,
+ "Reading database",
+ fmt.Sprintf(
+ "Could not retrieve database for instance %q in project %q.",
+ instanceId,
+ projectId,
+ ),
+ map[int]string{
+ http.StatusBadRequest: fmt.Sprintf(
+ "Invalid request parameters for project %q and instance %q.",
+ projectId,
+ instanceId,
+ ),
+ http.StatusNotFound: fmt.Sprintf(
+ "Database, instance %q, or project %q not found.",
+ instanceId,
+ projectId,
+ ),
+ http.StatusForbidden: fmt.Sprintf("Forbidden access to project %q.", projectId),
+ },
+ )
+}
diff --git a/stackit/internal/services/postgresflexalpha/database/datasources_gen/database_data_source_gen.go b/stackit/internal/services/postgresflexalpha/database/datasources_gen/database_data_source_gen.go
new file mode 100644
index 00000000..d5683a6c
--- /dev/null
+++ b/stackit/internal/services/postgresflexalpha/database/datasources_gen/database_data_source_gen.go
@@ -0,0 +1,69 @@
+// Code generated by terraform-plugin-framework-generator DO NOT EDIT.
+
+package postgresflexalpha
+
+import (
+ "context"
+ "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+
+ "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
+)
+
+func DatabaseDataSourceSchema(ctx context.Context) schema.Schema {
+ return schema.Schema{
+ Attributes: map[string]schema.Attribute{
+ "database_id": schema.Int64Attribute{
+ Required: true,
+ Description: "The ID of the database.",
+ MarkdownDescription: "The ID of the database.",
+ },
+ "tf_original_api_id": schema.Int64Attribute{
+ Computed: true,
+ Description: "The id of the database.",
+ MarkdownDescription: "The id of the database.",
+ },
+ "instance_id": schema.StringAttribute{
+ Required: true,
+ Description: "The ID of the instance.",
+ MarkdownDescription: "The ID of the instance.",
+ },
+ "name": schema.StringAttribute{
+ Computed: true,
+ Description: "The name of the database.",
+ MarkdownDescription: "The name of the database.",
+ },
+ "owner": schema.StringAttribute{
+ Computed: true,
+ Description: "The owner of the database.",
+ MarkdownDescription: "The owner of the database.",
+ },
+ "project_id": schema.StringAttribute{
+ Required: true,
+ Description: "The STACKIT project ID.",
+ MarkdownDescription: "The STACKIT project ID.",
+ },
+ "region": schema.StringAttribute{
+ Required: true,
+ Description: "The region which should be addressed",
+ MarkdownDescription: "The region which should be addressed",
+ Validators: []validator.String{
+ stringvalidator.OneOf(
+ "eu01",
+ ),
+ },
+ },
+ },
+ }
+}
+
+type DatabaseModel struct {
+ DatabaseId types.Int64 `tfsdk:"database_id"`
+ Id types.Int64 `tfsdk:"tf_original_api_id"`
+ InstanceId types.String `tfsdk:"instance_id"`
+ Name types.String `tfsdk:"name"`
+ Owner types.String `tfsdk:"owner"`
+ ProjectId types.String `tfsdk:"project_id"`
+ Region types.String `tfsdk:"region"`
+}
diff --git a/stackit/internal/services/postgresflexalpha/database/functions.go b/stackit/internal/services/postgresflexalpha/database/functions.go
index b1c30bb9..4496faa1 100644
--- a/stackit/internal/services/postgresflexalpha/database/functions.go
+++ b/stackit/internal/services/postgresflexalpha/database/functions.go
@@ -3,6 +3,7 @@ package postgresflexalpha
import (
"context"
"fmt"
+ "strings"
postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
)
@@ -79,3 +80,12 @@ func getDatabase(
return nil, fmt.Errorf("database not found for instance %s", instanceId)
}
+
+// cleanString removes leading and trailing quotes which are sometimes returned by the API.
+func cleanString(s *string) *string {
+ if s == nil {
+ return nil
+ }
+ res := strings.Trim(*s, "\"")
+ return &res
+}
diff --git a/stackit/internal/services/postgresflexalpha/database/functions_test.go b/stackit/internal/services/postgresflexalpha/database/functions_test.go
index 7ec941db..9f0b47fd 100644
--- a/stackit/internal/services/postgresflexalpha/database/functions_test.go
+++ b/stackit/internal/services/postgresflexalpha/database/functions_test.go
@@ -4,6 +4,7 @@ import (
"context"
"testing"
+ "github.com/google/go-cmp/cmp"
"github.com/stackitcloud/stackit-sdk-go/core/utils"
postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
)
@@ -12,8 +13,8 @@ type mockRequest struct {
executeFunc func() (*postgresflex.ListDatabasesResponse, error)
}
-func (m *mockRequest) Page(_ int64) postgresflex.ApiListDatabasesRequestRequest { return m }
-func (m *mockRequest) Size(_ int64) postgresflex.ApiListDatabasesRequestRequest { return m }
+func (m *mockRequest) Page(_ int32) postgresflex.ApiListDatabasesRequestRequest { return m }
+func (m *mockRequest) Size(_ int32) postgresflex.ApiListDatabasesRequestRequest { return m }
func (m *mockRequest) Sort(_ postgresflex.DatabaseSort) postgresflex.ApiListDatabasesRequestRequest {
return m
}
@@ -176,21 +177,56 @@ func TestGetDatabase(t *testing.T) {
}
if (errDB != nil) != tt.wantErr {
- t.Errorf("getDatabase() error = %v, wantErr %v", errDB, tt.wantErr)
+ t.Errorf("getDatabase() error = %v, wantErr %v", errDB, tt.wantErr)
return
}
if !tt.wantErr && tt.wantDbName != "" && actual != nil {
if *actual.Name != tt.wantDbName {
- t.Errorf("getDatabase() got name = %v, want %v", *actual.Name, tt.wantDbName)
+ t.Errorf("getDatabase() got name = %v, want %v", *actual.Name, tt.wantDbName)
}
}
if !tt.wantErr && tt.wantDbId != 0 && actual != nil {
if *actual.Id != tt.wantDbId {
- t.Errorf("getDatabase() got id = %v, want %v", *actual.Id, tt.wantDbId)
+ t.Errorf("getDatabase() got id = %v, want %v", *actual.Id, tt.wantDbId)
}
}
},
)
}
}
+
+func TestCleanString(t *testing.T) {
+ testcases := []struct {
+ name string
+ given *string
+ expected *string
+ }{
+ {
+ name: "should remove quotes",
+ given: utils.Ptr("\"quoted\""),
+ expected: utils.Ptr("quoted"),
+ },
+ {
+ name: "should handle nil",
+ given: nil,
+ expected: nil,
+ },
+ {
+ name: "should not change unquoted string",
+ given: utils.Ptr("unquoted"),
+ expected: utils.Ptr("unquoted"),
+ },
+ }
+
+ for _, tc := range testcases {
+ t.Run(
+ tc.name, func(t *testing.T) {
+ actual := cleanString(tc.given)
+ if diff := cmp.Diff(tc.expected, actual); diff != "" {
+ t.Errorf("string mismatch (-want +got):\n%s", diff)
+ }
+ },
+ )
+ }
+}
diff --git a/stackit/internal/services/postgresflexalpha/database/mapper.go b/stackit/internal/services/postgresflexalpha/database/mapper.go
new file mode 100644
index 00000000..5785f4b7
--- /dev/null
+++ b/stackit/internal/services/postgresflexalpha/database/mapper.go
@@ -0,0 +1,92 @@
+package postgresflexalpha
+
+import (
+ "fmt"
+ "strconv"
+
+ "github.com/hashicorp/terraform-plugin-framework/types"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
+)
+
+// mapFields maps fields from a ListDatabase API response to a resourceModel for the data source.
+func mapFields(
+ source *postgresflexalpha.ListDatabase,
+ model *dataSourceModel,
+ region string,
+) error {
+ if source == nil {
+ return fmt.Errorf("response is nil")
+ }
+ if source.Id == nil || *source.Id == 0 {
+ return fmt.Errorf("id not present")
+ }
+ if model == nil {
+ return fmt.Errorf("model given is nil")
+ }
+
+ var databaseId int64
+ if model.DatabaseId.ValueInt64() != 0 {
+ databaseId = model.DatabaseId.ValueInt64()
+ } else if source.Id != nil {
+ databaseId = *source.Id
+ } else {
+ return fmt.Errorf("database id not present")
+ }
+
+ model.Id = types.Int64Value(databaseId)
+ model.DatabaseId = types.Int64Value(databaseId)
+ model.Name = types.StringValue(source.GetName())
+ model.Owner = types.StringPointerValue(cleanString(source.Owner))
+ model.Region = types.StringValue(region)
+ model.ProjectId = types.StringValue(model.ProjectId.ValueString())
+ model.InstanceId = types.StringValue(model.InstanceId.ValueString())
+ model.TerraformID = utils.BuildInternalTerraformId(
+ model.ProjectId.ValueString(),
+ region,
+ model.InstanceId.ValueString(),
+ strconv.FormatInt(databaseId, 10),
+ )
+
+ return nil
+}
+
+// mapResourceFields maps fields from a ListDatabase API response to a resourceModel for the resource.
+func mapResourceFields(source *postgresflexalpha.ListDatabase, model *resourceModel) error {
+ if source == nil {
+ return fmt.Errorf("response is nil")
+ }
+ if source.Id == nil || *source.Id == 0 {
+ return fmt.Errorf("id not present")
+ }
+ if model == nil {
+ return fmt.Errorf("model input is nil")
+ }
+
+ var databaseId int64
+ if model.Id.ValueInt64() != 0 {
+ databaseId = model.Id.ValueInt64()
+ } else if source.Id != nil {
+ databaseId = *source.Id
+ } else {
+ return fmt.Errorf("database id not present")
+ }
+
+ model.Id = types.Int64Value(databaseId)
+ model.DatabaseId = types.Int64Value(databaseId)
+ model.Name = types.StringValue(source.GetName())
+ model.Owner = types.StringPointerValue(cleanString(source.Owner))
+ return nil
+}
+
+// toCreatePayload converts the resource model to an API create payload.
+func toCreatePayload(model *resourceModel) (*postgresflexalpha.CreateDatabaseRequestPayload, error) {
+ if model == nil {
+ return nil, fmt.Errorf("nil model")
+ }
+
+ return &postgresflexalpha.CreateDatabaseRequestPayload{
+ Name: model.Name.ValueStringPointer(),
+ Owner: model.Owner.ValueStringPointer(),
+ }, nil
+}
diff --git a/stackit/internal/services/postgresflexalpha/database/mapper_test.go b/stackit/internal/services/postgresflexalpha/database/mapper_test.go
new file mode 100644
index 00000000..a2f18c12
--- /dev/null
+++ b/stackit/internal/services/postgresflexalpha/database/mapper_test.go
@@ -0,0 +1,240 @@
+package postgresflexalpha
+
+import (
+ "testing"
+
+ "github.com/google/go-cmp/cmp"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+ "github.com/stackitcloud/stackit-sdk-go/core/utils"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
+ datasource "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/database/datasources_gen"
+)
+
+func TestMapFields(t *testing.T) {
+ type given struct {
+ source *postgresflexalpha.ListDatabase
+ model *dataSourceModel
+ region string
+ }
+ type expected struct {
+ model *dataSourceModel
+ err bool
+ }
+
+ testcases := []struct {
+ name string
+ given given
+ expected expected
+ }{
+ {
+ name: "should map fields correctly",
+ given: given{
+ source: &postgresflexalpha.ListDatabase{
+ Id: utils.Ptr(int64(1)),
+ Name: utils.Ptr("my-db"),
+ Owner: utils.Ptr("\"my-owner\""),
+ },
+ model: &dataSourceModel{},
+ region: "eu01",
+ },
+ expected: expected{
+ model: &dataSourceModel{
+ DatabaseModel: datasource.DatabaseModel{
+ Id: types.Int64Value(1),
+ Name: types.StringValue("my-db"),
+ Owner: types.StringValue("my-owner"),
+ Region: types.StringValue("eu01"),
+ DatabaseId: types.Int64Value(1),
+ InstanceId: types.StringValue("my-instance"),
+ ProjectId: types.StringValue("my-project"),
+ },
+ TerraformID: types.StringValue("my-project,eu01,my-instance,1"),
+ },
+ },
+ },
+ {
+ name: "should preserve existing model ID",
+ given: given{
+ source: &postgresflexalpha.ListDatabase{
+ Id: utils.Ptr(int64(1)),
+ Name: utils.Ptr("my-db"),
+ },
+ model: &dataSourceModel{
+ DatabaseModel: datasource.DatabaseModel{
+ Id: types.Int64Value(1),
+ ProjectId: types.StringValue("my-project"),
+ InstanceId: types.StringValue("my-instance"),
+ },
+ },
+ region: "eu01",
+ },
+ expected: expected{
+ model: &dataSourceModel{
+ DatabaseModel: datasource.DatabaseModel{
+ Id: types.Int64Value(1),
+ Name: types.StringValue("my-db"),
+ Owner: types.StringNull(), DatabaseId: types.Int64Value(1),
+ Region: types.StringValue("eu01"),
+ InstanceId: types.StringValue("my-instance"),
+ ProjectId: types.StringValue("my-project"),
+ },
+ TerraformID: types.StringValue("my-project,eu01,my-instance,1"),
+ },
+ },
+ },
+ {
+ name: "should fail on nil source",
+ given: given{
+ source: nil,
+ model: &dataSourceModel{},
+ },
+ expected: expected{err: true},
+ },
+ {
+ name: "should fail on nil source ID",
+ given: given{
+ source: &postgresflexalpha.ListDatabase{Id: nil},
+ model: &dataSourceModel{},
+ },
+ expected: expected{err: true},
+ },
+ {
+ name: "should fail on nil model",
+ given: given{
+ source: &postgresflexalpha.ListDatabase{Id: utils.Ptr(int64(1))},
+ model: nil,
+ },
+ expected: expected{err: true},
+ },
+ }
+
+ for _, tc := range testcases {
+ t.Run(
+ tc.name, func(t *testing.T) {
+ err := mapFields(tc.given.source, tc.given.model, tc.given.region)
+ if (err != nil) != tc.expected.err {
+ t.Fatalf("expected error: %v, got: %v", tc.expected.err, err)
+ }
+ if err == nil {
+ if diff := cmp.Diff(tc.expected.model, tc.given.model); diff != "" {
+ t.Errorf("model mismatch (-want +got):\n%s", diff)
+ }
+ }
+ },
+ )
+ }
+}
+
+func TestMapResourceFields(t *testing.T) {
+ type given struct {
+ source *postgresflexalpha.ListDatabase
+ model *resourceModel
+ }
+ type expected struct {
+ model *resourceModel
+ err bool
+ }
+
+ testcases := []struct {
+ name string
+ given given
+ expected expected
+ }{
+ {
+ name: "should map fields correctly",
+ given: given{
+ source: &postgresflexalpha.ListDatabase{
+ Id: utils.Ptr(int64(1)),
+ Name: utils.Ptr("my-db"),
+ Owner: utils.Ptr("\"my-owner\""),
+ },
+ model: &resourceModel{},
+ },
+ expected: expected{
+ model: &resourceModel{
+ Id: types.Int64Value(1),
+ Name: types.StringValue("my-db"),
+ Owner: types.StringValue("my-owner"),
+ DatabaseId: types.Int64Value(1),
+ },
+ },
+ },
+ {
+ name: "should fail on nil source",
+ given: given{
+ source: nil,
+ model: &resourceModel{},
+ },
+ expected: expected{err: true},
+ },
+ }
+
+ for _, tc := range testcases {
+ t.Run(
+ tc.name, func(t *testing.T) {
+ err := mapResourceFields(tc.given.source, tc.given.model)
+ if (err != nil) != tc.expected.err {
+ t.Fatalf("expected error: %v, got: %v", tc.expected.err, err)
+ }
+ if err == nil {
+ if diff := cmp.Diff(tc.expected.model, tc.given.model); diff != "" {
+ t.Errorf("model mismatch (-want +got):\n%s", diff)
+ }
+ }
+ },
+ )
+ }
+}
+
+func TestToCreatePayload(t *testing.T) {
+ type given struct {
+ model *resourceModel
+ }
+ type expected struct {
+ payload *postgresflexalpha.CreateDatabaseRequestPayload
+ err bool
+ }
+
+ testcases := []struct {
+ name string
+ given given
+ expected expected
+ }{
+ {
+ name: "should convert model to payload",
+ given: given{
+ model: &resourceModel{
+ Name: types.StringValue("my-db"),
+ Owner: types.StringValue("my-owner"),
+ },
+ },
+ expected: expected{
+ payload: &postgresflexalpha.CreateDatabaseRequestPayload{
+ Name: utils.Ptr("my-db"),
+ Owner: utils.Ptr("my-owner"),
+ },
+ },
+ },
+ {
+ name: "should fail on nil model",
+ given: given{model: nil},
+ expected: expected{err: true},
+ },
+ }
+
+ for _, tc := range testcases {
+ t.Run(
+ tc.name, func(t *testing.T) {
+ actual, err := toCreatePayload(tc.given.model)
+ if (err != nil) != tc.expected.err {
+ t.Fatalf("expected error: %v, got: %v", tc.expected.err, err)
+ }
+ if err == nil {
+ if diff := cmp.Diff(tc.expected.payload, actual); diff != "" {
+ t.Errorf("payload mismatch (-want +got):\n%s", diff)
+ }
+ }
+ },
+ )
+ }
+}
diff --git a/stackit/internal/services/postgresflexalpha/database/planModifiers.yaml b/stackit/internal/services/postgresflexalpha/database/planModifiers.yaml
new file mode 100644
index 00000000..f3f70aeb
--- /dev/null
+++ b/stackit/internal/services/postgresflexalpha/database/planModifiers.yaml
@@ -0,0 +1,32 @@
+fields:
+  - name: 'id'
+    modifiers:
+      - 'UseStateForUnknown'
+
+  - name: 'database_id'
+    modifiers:
+      - 'UseStateForUnknown'
+
+ - name: 'instance_id'
+ validators:
+ - validate.NoSeparator
+ - validate.UUID
+ modifiers:
+ - 'RequiresReplace'
+ - 'UseStateForUnknown'
+
+ - name: 'project_id'
+ modifiers:
+ - 'RequiresReplace'
+ - 'UseStateForUnknown'
+ validators:
+ - validate.NoSeparator
+ - validate.UUID
+
+ - name: 'name'
+ validators:
+ - validate.NoSeparator
+
+ - name: 'region'
+ modifiers:
+ - 'RequiresReplace'
diff --git a/stackit/internal/services/postgresflexalpha/database/resource.go b/stackit/internal/services/postgresflexalpha/database/resource.go
index 67d1e477..64c62e70 100644
--- a/stackit/internal/services/postgresflexalpha/database/resource.go
+++ b/stackit/internal/services/postgresflexalpha/database/resource.go
@@ -2,70 +2,73 @@ package postgresflexalpha
import (
"context"
+ _ "embed"
"errors"
"fmt"
"math"
"net/http"
- "regexp"
"strconv"
"strings"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
- "github.com/hashicorp/terraform-plugin-framework/resource/schema"
- "github.com/hashicorp/terraform-plugin-framework/resource/schema/int64planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/identityschema"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-log/tflog"
"github.com/stackitcloud/stackit-sdk-go/core/oapierror"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
+ postgresflexalpha2 "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/database/resources_gen"
postgresflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/utils"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/validate"
)
-// Ensure the implementation satisfies the expected interfaces.
var (
+ // Ensure the implementation satisfies the expected interfaces.
_ resource.Resource = &databaseResource{}
_ resource.ResourceWithConfigure = &databaseResource{}
_ resource.ResourceWithImportState = &databaseResource{}
_ resource.ResourceWithModifyPlan = &databaseResource{}
-)
+ _ resource.ResourceWithIdentity = &databaseResource{}
-type Model struct {
- Id types.String `tfsdk:"id"` // needed by TF
- DatabaseId types.Int64 `tfsdk:"database_id"`
- InstanceId types.String `tfsdk:"instance_id"`
- ProjectId types.String `tfsdk:"project_id"`
- Name types.String `tfsdk:"name"`
- Owner types.String `tfsdk:"owner"`
- Region types.String `tfsdk:"region"`
-}
+ // Define errors
+ errDatabaseNotFound = errors.New("database not found")
+
+ // Error message constants
+ extractErrorSummary = "extracting failed"
+ extractErrorMessage = "Extracting identity data: %v"
+)
// NewDatabaseResource is a helper function to simplify the provider implementation.
func NewDatabaseResource() resource.Resource {
return &databaseResource{}
}
+// resourceModel describes the resource data model.
+type resourceModel = postgresflexalpha2.DatabaseModel
+
+// DatabaseResourceIdentityModel describes the resource's identity attributes.
+type DatabaseResourceIdentityModel struct {
+ ProjectID types.String `tfsdk:"project_id"`
+ Region types.String `tfsdk:"region"`
+ InstanceID types.String `tfsdk:"instance_id"`
+ DatabaseID types.Int64 `tfsdk:"database_id"`
+}
+
// databaseResource is the resource implementation.
type databaseResource struct {
client *postgresflexalpha.APIClient
providerData core.ProviderData
}
-// ModifyPlan implements resource.ResourceWithModifyPlan.
-// Use the modifier to set the effective region in the current plan.
+// ModifyPlan adjusts the plan to set the correct region.
func (r *databaseResource) ModifyPlan(
ctx context.Context,
req resource.ModifyPlanRequest,
resp *resource.ModifyPlanResponse,
) { // nolint:gocritic // function signature required by Terraform
- var configModel Model
+ var configModel resourceModel
// skip initial empty configuration to avoid follow-up errors
if req.Config.Raw.IsNull() {
return
@@ -75,7 +78,7 @@ func (r *databaseResource) ModifyPlan(
return
}
- var planModel Model
+ var planModel resourceModel
resp.Diagnostics.Append(req.Plan.Get(ctx, &planModel)...)
if resp.Diagnostics.HasError() {
return
@@ -117,85 +120,46 @@ func (r *databaseResource) Configure(
tflog.Info(ctx, "Postgres Flex database client configured")
}
+//go:embed planModifiers.yaml
+var modifiersFileByte []byte
+
// Schema defines the schema for the resource.
-func (r *databaseResource) Schema(_ context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) {
- descriptions := map[string]string{
- "main": "Postgres Flex database resource schema. Must have a `region` specified in the provider configuration.",
- "id": "Terraform's internal resource ID. It is structured as \"`project_id`,`region`,`instance_id`,`database_id`\".",
- "database_id": "Database ID.",
- "instance_id": "ID of the Postgres Flex instance.",
- "project_id": "STACKIT project ID to which the instance is associated.",
- "name": "Database name.",
- "owner": "Username of the database owner.",
- "region": "The resource region. If not defined, the provider region is used.",
+func (r *databaseResource) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) {
+ s := postgresflexalpha2.DatabaseResourceSchema(ctx)
+
+ fields, err := utils.ReadModifiersConfig(modifiersFileByte)
+ if err != nil {
+ resp.Diagnostics.AddError("error during read modifiers config file", err.Error())
+ return
}
- resp.Schema = schema.Schema{
- Description: descriptions["main"],
- Attributes: map[string]schema.Attribute{
- "id": schema.StringAttribute{
- Description: descriptions["id"],
- Computed: true,
- PlanModifiers: []planmodifier.String{
- stringplanmodifier.UseStateForUnknown(),
- },
+ err = utils.AddPlanModifiersToResourceSchema(fields, &s)
+ if err != nil {
+ resp.Diagnostics.AddError("error adding plan modifiers", err.Error())
+ return
+ }
+ resp.Schema = s
+}
+
+// IdentitySchema defines the schema for the resource's identity attributes.
+func (r *databaseResource) IdentitySchema(
+ _ context.Context,
+ _ resource.IdentitySchemaRequest,
+ response *resource.IdentitySchemaResponse,
+) {
+ response.IdentitySchema = identityschema.Schema{
+ Attributes: map[string]identityschema.Attribute{
+ "project_id": identityschema.StringAttribute{
+ RequiredForImport: true,
},
- "database_id": schema.Int64Attribute{
- Description: descriptions["database_id"],
- Computed: true,
- PlanModifiers: []planmodifier.Int64{
- int64planmodifier.UseStateForUnknown(),
- },
- Validators: []validator.Int64{},
+ "region": identityschema.StringAttribute{
+ RequiredForImport: true,
},
- "instance_id": schema.StringAttribute{
- Description: descriptions["instance_id"],
- Required: true,
- PlanModifiers: []planmodifier.String{
- stringplanmodifier.RequiresReplace(),
- stringplanmodifier.UseStateForUnknown(),
- },
- Validators: []validator.String{
- validate.UUID(),
- validate.NoSeparator(),
- },
+ "instance_id": identityschema.StringAttribute{
+ RequiredForImport: true,
},
- "project_id": schema.StringAttribute{
- Description: descriptions["project_id"],
- Required: true,
- PlanModifiers: []planmodifier.String{
- stringplanmodifier.RequiresReplace(),
- stringplanmodifier.UseStateForUnknown(),
- },
- Validators: []validator.String{
- validate.UUID(),
- validate.NoSeparator(),
- },
- },
- "name": schema.StringAttribute{
- Description: descriptions["name"],
- Required: true,
- PlanModifiers: []planmodifier.String{},
- Validators: []validator.String{
- stringvalidator.RegexMatches(
- regexp.MustCompile("^[a-z]([a-z0-9]*)?$"),
- "must start with a letter, must have lower case letters or numbers",
- ),
- },
- },
- "owner": schema.StringAttribute{
- Description: descriptions["owner"],
- Required: true,
- PlanModifiers: []planmodifier.String{},
- },
- "region": schema.StringAttribute{
- Optional: true,
- // must be computed to allow for storing the override value from the provider
- Computed: true,
- Description: descriptions["region"],
- PlanModifiers: []planmodifier.String{
- stringplanmodifier.RequiresReplace(),
- },
+ "database_id": identityschema.Int64Attribute{
+ RequiredForImport: true,
},
},
}
@@ -207,18 +171,26 @@ func (r *databaseResource) Create(
req resource.CreateRequest,
resp *resource.CreateResponse,
) { // nolint:gocritic // function signature required by Terraform
- var model Model
+ var model resourceModel
diags := req.Plan.Get(ctx, &model)
resp.Diagnostics.Append(diags...)
if resp.Diagnostics.HasError() {
return
}
+ // Read identity data
+ var identityData DatabaseResourceIdentityModel
+ resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
ctx = core.InitProviderContext(ctx)
- projectId := model.ProjectId.ValueString()
- region := model.Region.ValueString()
- instanceId := model.InstanceId.ValueString()
+ projectId := identityData.ProjectID.ValueString()
+	region := identityData.Region.ValueString()
+ instanceId := identityData.InstanceID.ValueString()
+
ctx = tflog.SetField(ctx, "project_id", projectId)
ctx = tflog.SetField(ctx, "instance_id", instanceId)
ctx = tflog.SetField(ctx, "region", region)
@@ -272,7 +244,7 @@ func (r *databaseResource) Create(
}
// Map response body to schema
- err = mapFields(database, &model, region)
+ err = mapResourceFields(database, &model)
if err != nil {
core.LogAndAddError(
ctx,
@@ -282,9 +254,21 @@ func (r *databaseResource) Create(
)
return
}
+
+ // Set data returned by API in identity
+ identity := DatabaseResourceIdentityModel{
+ ProjectID: types.StringValue(projectId),
+ Region: types.StringValue(region),
+ InstanceID: types.StringValue(instanceId),
+ DatabaseID: types.Int64Value(databaseId),
+ }
+ resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
// Set state to fully populated data
- diags = resp.State.Set(ctx, model)
- resp.Diagnostics.Append(diags...)
+ resp.Diagnostics.Append(resp.State.Set(ctx, model)...)
if resp.Diagnostics.HasError() {
return
}
@@ -297,23 +281,36 @@ func (r *databaseResource) Read(
req resource.ReadRequest,
resp *resource.ReadResponse,
) { // nolint:gocritic // function signature required by Terraform
- var model Model
+ var model resourceModel
diags := req.State.Get(ctx, &model)
resp.Diagnostics.Append(diags...)
if resp.Diagnostics.HasError() {
return
}
+ // Read identity data
+ var identityData DatabaseResourceIdentityModel
+ resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
ctx = core.InitProviderContext(ctx)
- projectId := model.ProjectId.ValueString()
- instanceId := model.InstanceId.ValueString()
- databaseId := model.DatabaseId.ValueInt64()
- region := r.providerData.GetRegionWithOverride(model.Region)
+	projectId, region, instanceId, databaseId, errExt := r.extractIdentityData(model, identityData)
+	if errExt != nil {
+		core.LogAndAddError(
+			ctx, &resp.Diagnostics,
+			extractErrorSummary,
+			fmt.Sprintf(extractErrorMessage, errExt),
+		)
+		return
+	}
+
ctx = tflog.SetField(ctx, "project_id", projectId)
ctx = tflog.SetField(ctx, "instance_id", instanceId)
- ctx = tflog.SetField(ctx, "database_id", databaseId)
ctx = tflog.SetField(ctx, "region", region)
+ ctx = tflog.SetField(ctx, "database_id", databaseId)
databaseResp, err := getDatabaseById(ctx, r.client, projectId, region, instanceId, databaseId)
if err != nil {
@@ -329,7 +326,7 @@ func (r *databaseResource) Read(
ctx = core.LogResponse(ctx)
// Map response body to schema
- err = mapFields(databaseResp, &model, region)
+ err = mapResourceFields(databaseResp, &model)
if err != nil {
core.LogAndAddError(
ctx,
@@ -355,32 +352,45 @@ func (r *databaseResource) Update(
req resource.UpdateRequest,
resp *resource.UpdateResponse,
) {
- var model Model
+ var model resourceModel
diags := req.Plan.Get(ctx, &model)
resp.Diagnostics.Append(diags...)
if resp.Diagnostics.HasError() {
return
}
+ // Read identity data
+ var identityData DatabaseResourceIdentityModel
+ resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
ctx = core.InitProviderContext(ctx)
- projectId := model.ProjectId.ValueString()
- instanceId := model.InstanceId.ValueString()
- databaseId64 := model.DatabaseId.ValueInt64()
+	projectId, region, instanceId, databaseId64, errExt := r.extractIdentityData(model, identityData)
+	if errExt != nil {
+		core.LogAndAddError(
+			ctx, &resp.Diagnostics,
+			extractErrorSummary,
+			fmt.Sprintf(extractErrorMessage, errExt),
+		)
+		return
+	}
+
if databaseId64 > math.MaxInt32 {
core.LogAndAddError(ctx, &resp.Diagnostics, "Error in type conversion", "int value too large (databaseId)")
return
}
databaseId := int32(databaseId64)
- region := model.Region.ValueString()
ctx = tflog.SetField(ctx, "project_id", projectId)
ctx = tflog.SetField(ctx, "instance_id", instanceId)
- ctx = tflog.SetField(ctx, "database_id", databaseId)
ctx = tflog.SetField(ctx, "region", region)
+ ctx = tflog.SetField(ctx, "database_id", databaseId)
// Retrieve values from state
- var stateModel Model
+ var stateModel resourceModel
diags = req.State.Get(ctx, &stateModel)
resp.Diagnostics.Append(diags...)
if resp.Diagnostics.HasError() {
@@ -420,7 +430,7 @@ func (r *databaseResource) Update(
ctx = core.LogResponse(ctx)
// Map response body to schema
- err = mapFieldsUpdatePartially(res, &model, region)
+ err = mapResourceFields(res.Database, &model)
if err != nil {
core.LogAndAddError(
ctx,
@@ -445,29 +455,41 @@ func (r *databaseResource) Delete(
req resource.DeleteRequest,
resp *resource.DeleteResponse,
) { // nolint:gocritic // function signature required by Terraform
- var model Model
+ var model resourceModel
diags := req.State.Get(ctx, &model)
resp.Diagnostics.Append(diags...)
if resp.Diagnostics.HasError() {
return
}
+ // Read identity data
+ var identityData DatabaseResourceIdentityModel
+ resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
ctx = core.InitProviderContext(ctx)
- projectId := model.ProjectId.ValueString()
- instanceId := model.InstanceId.ValueString()
- databaseId64 := model.DatabaseId.ValueInt64()
+	projectId, region, instanceId, databaseId64, errExt := r.extractIdentityData(model, identityData)
+	if errExt != nil {
+		core.LogAndAddError(
+			ctx, &resp.Diagnostics,
+			extractErrorSummary,
+			fmt.Sprintf(extractErrorMessage, errExt),
+		)
+		return
+	}
if databaseId64 > math.MaxInt32 {
core.LogAndAddError(ctx, &resp.Diagnostics, "Error in type conversion", "int value too large (databaseId)")
return
}
databaseId := int32(databaseId64)
- region := model.Region.ValueString()
ctx = tflog.SetField(ctx, "project_id", projectId)
ctx = tflog.SetField(ctx, "instance_id", instanceId)
- ctx = tflog.SetField(ctx, "database_id", databaseId)
ctx = tflog.SetField(ctx, "region", region)
+ ctx = tflog.SetField(ctx, "database_id", databaseId)
// Delete existing record set
err := r.client.DeleteDatabaseRequestExecute(ctx, projectId, region, instanceId, databaseId)
@@ -481,95 +503,118 @@ func (r *databaseResource) Delete(
}
// ImportState imports a resource into the Terraform state on success.
-// The expected format of the resource import identifier is: project_id,zone_id,record_set_id
+// The expected import identifier format is: [project_id],[region],[instance_id],[database_id]
func (r *databaseResource) ImportState(
ctx context.Context,
req resource.ImportStateRequest,
resp *resource.ImportStateResponse,
) {
- idParts := strings.Split(req.ID, core.Separator)
- if len(idParts) != 4 || idParts[0] == "" || idParts[1] == "" || idParts[2] == "" || idParts[3] == "" {
- core.LogAndAddError(
- ctx, &resp.Diagnostics,
- "Error importing database",
- fmt.Sprintf(
- "Expected import identifier with format [project_id],[region],[instance_id],[database_id], got %q",
- req.ID,
- ),
+
+ ctx = core.InitProviderContext(ctx)
+
+ if req.ID != "" {
+
+ idParts := strings.Split(req.ID, core.Separator)
+
+ if len(idParts) != 4 || idParts[0] == "" || idParts[1] == "" || idParts[2] == "" || idParts[3] == "" {
+ core.LogAndAddError(
+ ctx, &resp.Diagnostics,
+ "Error importing database",
+ fmt.Sprintf(
+ "Expected import identifier with format [project_id],[region],[instance_id],[database_id], got %q",
+ req.ID,
+ ),
+ )
+ return
+ }
+
+ databaseId, err := strconv.ParseInt(idParts[3], 10, 64)
+ if err != nil {
+ core.LogAndAddError(
+ ctx,
+ &resp.Diagnostics,
+ "Error importing database",
+ fmt.Sprintf("Invalid database_id format: %q. It must be a valid integer.", idParts[3]),
+ )
+ return
+ }
+
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), idParts[0])...)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), idParts[1])...)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), idParts[2])...)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("database_id"), databaseId)...)
+
+ core.LogAndAddWarning(
+ ctx,
+ &resp.Diagnostics,
+ "Postgresflex database imported with empty password",
+ "The database password is not imported as it is only available upon creation of a new database. The password field will be empty.",
)
+
+ tflog.Info(ctx, "Postgres Flex database state imported")
+
return
}
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), idParts[0])...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), idParts[1])...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), idParts[2])...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("database_id"), idParts[3])...)
- core.LogAndAddWarning(
- ctx,
- &resp.Diagnostics,
- "Postgresflex database imported with empty password",
- "The database password is not imported as it is only available upon creation of a new database. The password field will be empty.",
- )
+ // If no ID is provided, attempt to read identity attributes from the import configuration
+ var identityData DatabaseResourceIdentityModel
+ resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ projectId := identityData.ProjectID.ValueString()
+ region := identityData.Region.ValueString()
+ instanceId := identityData.InstanceID.ValueString()
+ databaseId := identityData.DatabaseID.ValueInt64()
+
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), projectId)...)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), region)...)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), instanceId)...)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("database_id"), databaseId)...)
+
tflog.Info(ctx, "Postgres Flex database state imported")
}
-func mapFields(resp *postgresflexalpha.ListDatabase, model *Model, region string) error {
- if resp == nil {
- return fmt.Errorf("response is nil")
- }
- if resp.Id == nil || *resp.Id == 0 {
- return fmt.Errorf("id not present")
- }
- if model == nil {
- return fmt.Errorf("model input is nil")
- }
-
- var databaseId int64
- if model.DatabaseId.ValueInt64() != 0 {
+// extractIdentityData extracts essential identifiers from the resource model, falling back to the identity model.
+func (r *databaseResource) extractIdentityData(
+ model resourceModel,
+ identity DatabaseResourceIdentityModel,
+) (projectId, region, instanceId string, databaseId int64, err error) {
+ if !model.DatabaseId.IsNull() && !model.DatabaseId.IsUnknown() {
databaseId = model.DatabaseId.ValueInt64()
- } else if resp.Id != nil {
- databaseId = *resp.Id
} else {
- return fmt.Errorf("database id not present")
- }
- model.Id = utils.BuildInternalTerraformId(
- model.ProjectId.ValueString(), region, model.InstanceId.ValueString(), strconv.FormatInt(databaseId, 10),
- )
- model.DatabaseId = types.Int64Value(databaseId)
- model.Name = types.StringPointerValue(resp.Name)
- model.Region = types.StringValue(region)
- model.Owner = types.StringPointerValue(cleanString(resp.Owner))
- return nil
-}
-
-func mapFieldsUpdatePartially(
- res *postgresflexalpha.UpdateDatabasePartiallyResponse,
- model *Model,
- region string,
-) error {
- if res == nil {
- return fmt.Errorf("response is nil")
- }
- return mapFields(res.Database, model, region)
-}
-
-func cleanString(s *string) *string {
- if s == nil {
- return nil
- }
- res := strings.Trim(*s, "\"")
- return &res
-}
-
-func toCreatePayload(model *Model) (*postgresflexalpha.CreateDatabaseRequestPayload, error) {
- if model == nil {
- return nil, fmt.Errorf("nil model")
+ if identity.DatabaseID.IsNull() || identity.DatabaseID.IsUnknown() {
+ return "", "", "", 0, fmt.Errorf("database_id not found in config")
+ }
+ databaseId = identity.DatabaseID.ValueInt64()
}
- return &postgresflexalpha.CreateDatabaseRequestPayload{
- Name: model.Name.ValueStringPointer(),
- Owner: model.Owner.ValueStringPointer(),
- }, nil
-}
+ if !model.ProjectId.IsNull() && !model.ProjectId.IsUnknown() {
+ projectId = model.ProjectId.ValueString()
+ } else {
+ if identity.ProjectID.IsNull() || identity.ProjectID.IsUnknown() {
+ return "", "", "", 0, fmt.Errorf("project_id not found in config")
+ }
+ projectId = identity.ProjectID.ValueString()
+ }
-var errDatabaseNotFound = errors.New("database not found")
+ if !model.Region.IsNull() && !model.Region.IsUnknown() {
+ region = r.providerData.GetRegionWithOverride(model.Region)
+ } else {
+ if identity.Region.IsNull() || identity.Region.IsUnknown() {
+ return "", "", "", 0, fmt.Errorf("region not found in config")
+ }
+ region = r.providerData.GetRegionWithOverride(identity.Region)
+ }
+
+ if !model.InstanceId.IsNull() && !model.InstanceId.IsUnknown() {
+ instanceId = model.InstanceId.ValueString()
+ } else {
+ if identity.InstanceID.IsNull() || identity.InstanceID.IsUnknown() {
+ return "", "", "", 0, fmt.Errorf("instance_id not found in config")
+ }
+ instanceId = identity.InstanceID.ValueString()
+ }
+ return projectId, region, instanceId, databaseId, nil
+}
diff --git a/stackit/internal/services/postgresflexalpha/database/resource_test.go b/stackit/internal/services/postgresflexalpha/database/resource_test.go
deleted file mode 100644
index 15bced10..00000000
--- a/stackit/internal/services/postgresflexalpha/database/resource_test.go
+++ /dev/null
@@ -1,232 +0,0 @@
-package postgresflexalpha
-
-import (
- "reflect"
- "testing"
-
- "github.com/google/go-cmp/cmp"
- "github.com/hashicorp/terraform-plugin-framework/types"
- "github.com/stackitcloud/stackit-sdk-go/core/utils"
- postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
-)
-
-func TestMapFields(t *testing.T) {
- const testRegion = "region"
- tests := []struct {
- description string
- input *postgresflex.ListDatabase
- region string
- expected Model
- isValid bool
- }{
- {
- "default_values",
- &postgresflex.ListDatabase{
- Id: utils.Ptr(int64(1)),
- },
- testRegion,
- Model{
- Id: types.StringValue("pid,region,iid,1"),
- DatabaseId: types.Int64Value(int64(1)),
- InstanceId: types.StringValue("iid"),
- ProjectId: types.StringValue("pid"),
- Name: types.StringNull(),
- Owner: types.StringNull(),
- Region: types.StringValue(testRegion),
- },
- true,
- },
- {
- "simple_values",
- &postgresflex.ListDatabase{
- Id: utils.Ptr(int64(1)),
- Name: utils.Ptr("dbname"),
- Owner: utils.Ptr("username"),
- },
- testRegion,
- Model{
- Id: types.StringValue("pid,region,iid,1"),
- DatabaseId: types.Int64Value(int64(1)),
- InstanceId: types.StringValue("iid"),
- ProjectId: types.StringValue("pid"),
- Name: types.StringValue("dbname"),
- Owner: types.StringValue("username"),
- Region: types.StringValue(testRegion),
- },
- true,
- },
- {
- "null_fields_and_int_conversions",
- &postgresflex.ListDatabase{
- Id: utils.Ptr(int64(1)),
- Name: utils.Ptr(""),
- Owner: utils.Ptr(""),
- },
- testRegion,
- Model{
- Id: types.StringValue("pid,region,iid,1"),
- DatabaseId: types.Int64Value(int64(1)),
- InstanceId: types.StringValue("iid"),
- ProjectId: types.StringValue("pid"),
- Name: types.StringValue(""),
- Owner: types.StringValue(""),
- Region: types.StringValue(testRegion),
- },
- true,
- },
- {
- "nil_response",
- nil,
- testRegion,
- Model{},
- false,
- },
- {
- "empty_response",
- &postgresflex.ListDatabase{},
- testRegion,
- Model{},
- false,
- },
- {
- "no_resource_id",
- &postgresflex.ListDatabase{
- Id: utils.Ptr(int64(0)),
- Name: utils.Ptr("dbname"),
- Owner: utils.Ptr("username"),
- },
- testRegion,
- Model{},
- false,
- },
- }
- for _, tt := range tests {
- t.Run(
- tt.description, func(t *testing.T) {
- state := &Model{
- ProjectId: tt.expected.ProjectId,
- InstanceId: tt.expected.InstanceId,
- }
- err := mapFields(tt.input, state, tt.region)
- if !tt.isValid && err == nil {
- t.Fatalf("Should have failed")
- }
- if tt.isValid && err != nil {
- t.Fatalf("Should not have failed: %v", err)
- }
- if tt.isValid {
- diff := cmp.Diff(state, &tt.expected)
- if diff != "" {
- t.Fatalf("Data does not match: %s", diff)
- }
- }
- },
- )
- }
-}
-
-func TestToCreatePayload(t *testing.T) {
- tests := []struct {
- description string
- input *Model
- expected *postgresflex.CreateDatabaseRequestPayload
- isValid bool
- }{
- {
- "default_values",
- &Model{
- Name: types.StringValue("dbname"),
- Owner: types.StringValue("username"),
- },
- &postgresflex.CreateDatabaseRequestPayload{
- Name: utils.Ptr("dbname"),
- Owner: utils.Ptr("username"),
- },
- true,
- },
- {
- "null_fields",
- &Model{
- Name: types.StringNull(),
- Owner: types.StringNull(),
- },
- &postgresflex.CreateDatabaseRequestPayload{
- Name: nil,
- Owner: nil,
- },
- true,
- },
- {
- "nil_model",
- nil,
- nil,
- false,
- },
- }
- for _, tt := range tests {
- t.Run(
- tt.description, func(t *testing.T) {
- output, err := toCreatePayload(tt.input)
- if !tt.isValid && err == nil {
- t.Fatalf("Should have failed")
- }
- if tt.isValid && err != nil {
- t.Fatalf("Should not have failed: %v", err)
- }
- if tt.isValid {
- diff := cmp.Diff(output, tt.expected)
- if diff != "" {
- t.Fatalf("Data does not match: %s", diff)
- }
- }
- },
- )
- }
-}
-
-func Test_cleanString(t *testing.T) {
- type args struct {
- s *string
- }
- tests := []struct {
- name string
- args args
- want *string
- }{
- {
- name: "simple_value",
- args: args{
- s: utils.Ptr("mytest"),
- },
- want: utils.Ptr("mytest"),
- },
- {
- name: "simple_value_with_quotes",
- args: args{
- s: utils.Ptr("\"mytest\""),
- },
- want: utils.Ptr("mytest"),
- },
- {
- name: "simple_values_with_quotes",
- args: args{
- s: utils.Ptr("\"my test here\""),
- },
- want: utils.Ptr("my test here"),
- },
- {
- name: "simple_values",
- args: args{
- s: utils.Ptr("my test here"),
- },
- want: utils.Ptr("my test here"),
- },
- }
- for _, tt := range tests {
- t.Run(tt.name, func(t *testing.T) {
- if got := cleanString(tt.args.s); !reflect.DeepEqual(got, tt.want) {
- t.Errorf("cleanString() = %v, want %v", got, tt.want)
- }
- })
- }
-}
diff --git a/stackit/internal/services/postgresflexalpha/database/resources_gen/database_resource_gen.go b/stackit/internal/services/postgresflexalpha/database/resources_gen/database_resource_gen.go
index 95f6b6e5..6affc956 100644
--- a/stackit/internal/services/postgresflexalpha/database/resources_gen/database_resource_gen.go
+++ b/stackit/internal/services/postgresflexalpha/database/resources_gen/database_resource_gen.go
@@ -4,6 +4,8 @@ package postgresflexalpha
import (
"context"
+ "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
@@ -12,11 +14,23 @@ import (
func DatabaseResourceSchema(ctx context.Context) schema.Schema {
return schema.Schema{
Attributes: map[string]schema.Attribute{
+ "database_id": schema.Int64Attribute{
+ Optional: true,
+ Computed: true,
+ Description: "The ID of the database.",
+ MarkdownDescription: "The ID of the database.",
+ },
"id": schema.Int64Attribute{
Computed: true,
Description: "The id of the database.",
MarkdownDescription: "The id of the database.",
},
+ "instance_id": schema.StringAttribute{
+ Optional: true,
+ Computed: true,
+ Description: "The ID of the instance.",
+ MarkdownDescription: "The ID of the instance.",
+ },
"name": schema.StringAttribute{
Required: true,
Description: "The name of the database.",
@@ -28,12 +42,33 @@ func DatabaseResourceSchema(ctx context.Context) schema.Schema {
Description: "The owner of the database.",
MarkdownDescription: "The owner of the database.",
},
+ "project_id": schema.StringAttribute{
+ Optional: true,
+ Computed: true,
+ Description: "The STACKIT project ID.",
+ MarkdownDescription: "The STACKIT project ID.",
+ },
+ "region": schema.StringAttribute{
+ Optional: true,
+ Computed: true,
+ Description: "The region which should be addressed",
+ MarkdownDescription: "The region which should be addressed",
+ Validators: []validator.String{
+ stringvalidator.OneOf(
+ "eu01",
+ ),
+ },
+ },
},
}
}
type DatabaseModel struct {
- Id types.Int64 `tfsdk:"id"`
- Name types.String `tfsdk:"name"`
- Owner types.String `tfsdk:"owner"`
+ DatabaseId types.Int64 `tfsdk:"database_id"`
+ Id types.Int64 `tfsdk:"id"`
+ InstanceId types.String `tfsdk:"instance_id"`
+ Name types.String `tfsdk:"name"`
+ Owner types.String `tfsdk:"owner"`
+ ProjectId types.String `tfsdk:"project_id"`
+ Region types.String `tfsdk:"region"`
}
diff --git a/stackit/internal/services/postgresflexalpha/flavor/functions_test.go b/stackit/internal/services/postgresflexalpha/flavor/functions_test.go
index db8fa3bf..90590716 100644
--- a/stackit/internal/services/postgresflexalpha/flavor/functions_test.go
+++ b/stackit/internal/services/postgresflexalpha/flavor/functions_test.go
@@ -12,8 +12,8 @@ type mockRequest struct {
executeFunc func() (*postgresflex.GetFlavorsResponse, error)
}
-func (m *mockRequest) Page(_ int64) postgresflex.ApiGetFlavorsRequestRequest { return m }
-func (m *mockRequest) Size(_ int64) postgresflex.ApiGetFlavorsRequestRequest { return m }
+func (m *mockRequest) Page(_ int32) postgresflex.ApiGetFlavorsRequestRequest { return m }
+func (m *mockRequest) Size(_ int32) postgresflex.ApiGetFlavorsRequestRequest { return m }
func (m *mockRequest) Sort(_ postgresflex.FlavorSort) postgresflex.ApiGetFlavorsRequestRequest {
return m
}
diff --git a/stackit/internal/services/postgresflexalpha/flavors/datasource.go b/stackit/internal/services/postgresflexalpha/flavors/datasource.go
index 26be805b..44483018 100644
--- a/stackit/internal/services/postgresflexalpha/flavors/datasource.go
+++ b/stackit/internal/services/postgresflexalpha/flavors/datasource.go
@@ -21,12 +21,19 @@ func NewFlavorsDataSource() datasource.DataSource {
return &flavorsDataSource{}
}
+// dataSourceModel maps the data source schema data.
+type dataSourceModel = postgresflexalphaGen.FlavorsModel
+
type flavorsDataSource struct {
client *postgresflexalpha.APIClient
providerData core.ProviderData
}
-func (d *flavorsDataSource) Metadata(_ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
+func (d *flavorsDataSource) Metadata(
+ _ context.Context,
+ req datasource.MetadataRequest,
+ resp *datasource.MetadataResponse,
+) {
resp.TypeName = req.ProviderTypeName + "_postgresflexalpha_flavors"
}
@@ -35,7 +42,11 @@ func (d *flavorsDataSource) Schema(ctx context.Context, _ datasource.SchemaReque
}
// Configure adds the provider configured client to the data source.
-func (d *flavorsDataSource) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) {
+func (d *flavorsDataSource) Configure(
+ ctx context.Context,
+ req datasource.ConfigureRequest,
+ resp *datasource.ConfigureResponse,
+) {
var ok bool
d.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics)
if !ok {
@@ -51,7 +62,7 @@ func (d *flavorsDataSource) Configure(ctx context.Context, req datasource.Config
}
func (d *flavorsDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
- var data postgresflexalphaGen.FlavorsModel
+ var data dataSourceModel
// Read Terraform configuration data into the model
resp.Diagnostics.Append(req.Config.Get(ctx, &data)...)
diff --git a/stackit/internal/services/postgresflexalpha/flavors/datasources_gen/flavors_data_source_gen.go b/stackit/internal/services/postgresflexalpha/flavors/datasources_gen/flavors_data_source_gen.go
index 924d1375..dbfe5cc9 100644
--- a/stackit/internal/services/postgresflexalpha/flavors/datasources_gen/flavors_data_source_gen.go
+++ b/stackit/internal/services/postgresflexalpha/flavors/datasources_gen/flavors_data_source_gen.go
@@ -33,7 +33,7 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
Description: "The flavor description.",
MarkdownDescription: "The flavor description.",
},
- "id": schema.StringAttribute{
+ "tf_original_api_id": schema.StringAttribute{
Computed: true,
Description: "The id of the instance flavor.",
MarkdownDescription: "The id of the instance flavor.",
diff --git a/stackit/internal/services/postgresflexalpha/instance/datasource.go b/stackit/internal/services/postgresflexalpha/instance/datasource.go
index de0c5c74..95f7904b 100644
--- a/stackit/internal/services/postgresflexalpha/instance/datasource.go
+++ b/stackit/internal/services/postgresflexalpha/instance/datasource.go
@@ -5,6 +5,7 @@ import (
"fmt"
"net/http"
+ "github.com/hashicorp/terraform-plugin-framework/types"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
postgresflexalpha2 "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/instance/datasources_gen"
@@ -26,6 +27,12 @@ func NewInstanceDataSource() datasource.DataSource {
return &instanceDataSource{}
}
+// dataSourceModel maps the data source schema data.
+type dataSourceModel struct {
+ postgresflexalpha2.InstanceModel
+ TerraformID types.String `tfsdk:"id"`
+}
+
// instanceDataSource is the data source implementation.
type instanceDataSource struct {
client *postgresflexalpha.APIClient
@@ -33,12 +40,20 @@ type instanceDataSource struct {
}
// Metadata returns the data source type name.
-func (r *instanceDataSource) Metadata(_ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
+func (r *instanceDataSource) Metadata(
+ _ context.Context,
+ req datasource.MetadataRequest,
+ resp *datasource.MetadataResponse,
+) {
resp.TypeName = req.ProviderTypeName + "_postgresflexalpha_instance"
}
// Configure adds the provider configured client to the data source.
-func (r *instanceDataSource) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) {
+func (r *instanceDataSource) Configure(
+ ctx context.Context,
+ req datasource.ConfigureRequest,
+ resp *datasource.ConfigureResponse,
+) {
var ok bool
r.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics)
if !ok {
@@ -59,8 +74,12 @@ func (r *instanceDataSource) Schema(ctx context.Context, _ datasource.SchemaRequ
}
// Read refreshes the Terraform state with the latest data.
-func (r *instanceDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { // nolint:gocritic // function signature required by Terraform
- var model postgresflexalpha2.InstanceModel
+func (r *instanceDataSource) Read(
+ ctx context.Context,
+ req datasource.ReadRequest,
+ resp *datasource.ReadResponse,
+) { // nolint:gocritic // function signature required by Terraform
+ var model dataSourceModel
diags := req.Config.Get(ctx, &model)
resp.Diagnostics.Append(diags...)
if resp.Diagnostics.HasError() {
diff --git a/stackit/internal/services/postgresflexalpha/instance/datasources_gen/instance_data_source_gen.go b/stackit/internal/services/postgresflexalpha/instance/datasources_gen/instance_data_source_gen.go
index 5ff386fe..047d0176 100644
--- a/stackit/internal/services/postgresflexalpha/instance/datasources_gen/instance_data_source_gen.go
+++ b/stackit/internal/services/postgresflexalpha/instance/datasources_gen/instance_data_source_gen.go
@@ -88,7 +88,7 @@ func InstanceDataSourceSchema(ctx context.Context) schema.Schema {
Description: "The id of the instance flavor.",
MarkdownDescription: "The id of the instance flavor.",
},
- "id": schema.StringAttribute{
+ "tf_original_api_id": schema.StringAttribute{
Computed: true,
Description: "The ID of the instance.",
MarkdownDescription: "The ID of the instance.",
@@ -204,7 +204,7 @@ type InstanceModel struct {
ConnectionInfo ConnectionInfoValue `tfsdk:"connection_info"`
Encryption EncryptionValue `tfsdk:"encryption"`
FlavorId types.String `tfsdk:"flavor_id"`
- Id types.String `tfsdk:"id"`
+ Id types.String `tfsdk:"tf_original_api_id"`
InstanceId types.String `tfsdk:"instance_id"`
IsDeletable types.Bool `tfsdk:"is_deletable"`
Name types.String `tfsdk:"name"`
diff --git a/stackit/internal/services/postgresflexalpha/instance/functions.go b/stackit/internal/services/postgresflexalpha/instance/functions.go
index dc29abe4..862f88ff 100644
--- a/stackit/internal/services/postgresflexalpha/instance/functions.go
+++ b/stackit/internal/services/postgresflexalpha/instance/functions.go
@@ -14,26 +14,32 @@ import (
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
)
-func mapGetInstanceResponseToModel(ctx context.Context, m *postgresflexalpharesource.InstanceModel, resp *postgresflex.GetInstanceResponse) error {
- tflog.Debug(ctx, ">>>> MSH DEBUG <<<<", map[string]interface{}{
- "id": m.Id.ValueString(),
- "instance_id": m.InstanceId.ValueString(),
- "backup_schedule": m.BackupSchedule.ValueString(),
- "flavor_id": m.FlavorId.ValueString(),
- "encryption.kek_key_id": m.Encryption.KekKeyId.ValueString(),
- "encryption.kek_key_ring_id": m.Encryption.KekKeyRingId.ValueString(),
- "encryption.kek_key_version": m.Encryption.KekKeyVersion.ValueString(),
- "encryption.service_account": m.Encryption.ServiceAccount.ValueString(),
- "is_deletable": m.IsDeletable.ValueBool(),
- "name": m.Name.ValueString(),
- "status": m.Status.ValueString(),
- "retention_days": m.RetentionDays.ValueInt64(),
- "replicas": m.Replicas.ValueInt64(),
- "network.instance_address": m.Network.InstanceAddress.ValueString(),
- "network.router_address": m.Network.RouterAddress.ValueString(),
- "version": m.Version.ValueString(),
- "network.acl": m.Network.Acl.String(),
- })
+func mapGetInstanceResponseToModel(
+ ctx context.Context,
+ m *postgresflexalpharesource.InstanceModel,
+ resp *postgresflex.GetInstanceResponse,
+) error {
+ tflog.Debug(
+ ctx, ">>>> MSH DEBUG <<<<", map[string]interface{}{
+ "id": m.Id.ValueString(),
+ "instance_id": m.InstanceId.ValueString(),
+ "backup_schedule": m.BackupSchedule.ValueString(),
+ "flavor_id": m.FlavorId.ValueString(),
+ "encryption.kek_key_id": m.Encryption.KekKeyId.ValueString(),
+ "encryption.kek_key_ring_id": m.Encryption.KekKeyRingId.ValueString(),
+ "encryption.kek_key_version": m.Encryption.KekKeyVersion.ValueString(),
+ "encryption.service_account": m.Encryption.ServiceAccount.ValueString(),
+ "is_deletable": m.IsDeletable.ValueBool(),
+ "name": m.Name.ValueString(),
+ "status": m.Status.ValueString(),
+ "retention_days": m.RetentionDays.ValueInt64(),
+ "replicas": m.Replicas.ValueInt64(),
+ "network.instance_address": m.Network.InstanceAddress.ValueString(),
+ "network.router_address": m.Network.RouterAddress.ValueString(),
+ "version": m.Version.ValueString(),
+ "network.acl": m.Network.Acl.String(),
+ },
+ )
m.BackupSchedule = types.StringValue(resp.GetBackupSchedule())
m.Encryption = postgresflexalpharesource.NewEncryptionValueNull()
@@ -61,7 +67,11 @@ func mapGetInstanceResponseToModel(ctx context.Context, m *postgresflexalphareso
m.FlavorId = types.StringValue(resp.GetFlavorId())
if m.Id.IsNull() || m.Id.IsUnknown() {
- m.Id = utils.BuildInternalTerraformId(m.ProjectId.ValueString(), m.Region.ValueString(), m.InstanceId.ValueString())
+ m.Id = utils.BuildInternalTerraformId(
+ m.ProjectId.ValueString(),
+ m.Region.ValueString(),
+ m.InstanceId.ValueString(),
+ )
}
m.InstanceId = types.StringPointerValue(resp.Id)
@@ -121,7 +131,11 @@ func mapGetInstanceResponseToModel(ctx context.Context, m *postgresflexalphareso
return nil
}
-func mapGetDataInstanceResponseToModel(ctx context.Context, m *postgresflexalphadatasource.InstanceModel, resp *postgresflex.GetInstanceResponse) error {
+func mapGetDataInstanceResponseToModel(
+ ctx context.Context,
+ m *dataSourceModel,
+ resp *postgresflex.GetInstanceResponse,
+) error {
m.BackupSchedule = types.StringValue(resp.GetBackupSchedule())
handleEncryption(m, resp)
m.ConnectionInfo.Host = types.StringValue(resp.ConnectionInfo.GetHost())
@@ -155,7 +169,7 @@ func mapGetDataInstanceResponseToModel(ctx context.Context, m *postgresflexalpha
return nil
}
-func handleNetwork(ctx context.Context, m *postgresflexalphadatasource.InstanceModel, resp *postgresflex.GetInstanceResponse) error {
+func handleNetwork(ctx context.Context, m *dataSourceModel, resp *postgresflex.GetInstanceResponse) error {
netAcl, diags := types.ListValueFrom(ctx, types.StringType, resp.Network.GetAcl())
if diags.HasError() {
return fmt.Errorf("failed converting network acl from response")
@@ -187,7 +201,7 @@ func handleNetwork(ctx context.Context, m *postgresflexalphadatasource.InstanceM
return nil
}
-func handleEncryption(m *postgresflexalphadatasource.InstanceModel, resp *postgresflex.GetInstanceResponse) {
+func handleEncryption(m *dataSourceModel, resp *postgresflex.GetInstanceResponse) {
keyId := ""
if keyIdVal, ok := resp.Encryption.GetKekKeyIdOk(); ok {
keyId = keyIdVal
diff --git a/stackit/internal/services/postgresflexalpha/instance/resource.go b/stackit/internal/services/postgresflexalpha/instance/resource.go
index f061f8bf..78bb0572 100644
--- a/stackit/internal/services/postgresflexalpha/instance/resource.go
+++ b/stackit/internal/services/postgresflexalpha/instance/resource.go
@@ -23,8 +23,6 @@ import (
wait "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/wait/postgresflexalpha"
)
-const packageName = "postgresflexalpha"
-
// Ensure the implementation satisfies the expected interfaces.
var (
_ resource.Resource = &instanceResource{}
@@ -40,11 +38,8 @@ func NewInstanceResource() resource.Resource {
return &instanceResource{}
}
-// instanceResource is the resource implementation.
-type instanceResource struct {
- client *postgresflex.APIClient
- providerData core.ProviderData
-}
+// resourceModel describes the resource data model.
+type resourceModel = postgresflexalpha.InstanceModel
type InstanceResourceIdentityModel struct {
ProjectID types.String `tfsdk:"project_id"`
@@ -52,8 +47,18 @@ type InstanceResourceIdentityModel struct {
InstanceID types.String `tfsdk:"instance_id"`
}
-func (r *instanceResource) ValidateConfig(ctx context.Context, req resource.ValidateConfigRequest, resp *resource.ValidateConfigResponse) {
- var data postgresflexalpha.InstanceModel
+// instanceResource is the resource implementation.
+type instanceResource struct {
+ client *postgresflex.APIClient
+ providerData core.ProviderData
+}
+
+func (r *instanceResource) ValidateConfig(
+ ctx context.Context,
+ req resource.ValidateConfigRequest,
+ resp *resource.ValidateConfigResponse,
+) {
+ var data resourceModel
resp.Diagnostics.Append(req.Config.Get(ctx, &data)...)
if resp.Diagnostics.HasError() {
@@ -72,8 +77,12 @@ func (r *instanceResource) ValidateConfig(ctx context.Context, req resource.Vali
// ModifyPlan implements resource.ResourceWithModifyPlan.
// Use the modifier to set the effective region in the current plan.
-func (r *instanceResource) ModifyPlan(ctx context.Context, req resource.ModifyPlanRequest, resp *resource.ModifyPlanResponse) { // nolint:gocritic // function signature required by Terraform
- var configModel postgresflexalpha.InstanceModel
+func (r *instanceResource) ModifyPlan(
+ ctx context.Context,
+ req resource.ModifyPlanRequest,
+ resp *resource.ModifyPlanResponse,
+) { // nolint:gocritic // function signature required by Terraform
+ var configModel resourceModel
// skip initial empty configuration to avoid follow-up errors
if req.Config.Raw.IsNull() {
return
@@ -83,7 +92,7 @@ func (r *instanceResource) ModifyPlan(ctx context.Context, req resource.ModifyPl
return
}
- var planModel postgresflexalpha.InstanceModel
+ var planModel resourceModel
resp.Diagnostics.Append(req.Plan.Get(ctx, &planModel)...)
if resp.Diagnostics.HasError() {
return
@@ -135,13 +144,13 @@ var modifiersFileByte []byte
// Schema defines the schema for the resource.
func (r *instanceResource) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) {
schema := postgresflexalpha.InstanceResourceSchema(ctx)
- fields, err := postgresflexUtils.ReadModifiersConfig(modifiersFileByte)
+ fields, err := utils.ReadModifiersConfig(modifiersFileByte)
if err != nil {
resp.Diagnostics.AddError("error during read modifiers config file", err.Error())
return
}
- err = postgresflexUtils.AddPlanModifiersToResourceSchema(fields, &schema)
+ err = utils.AddPlanModifiersToResourceSchema(fields, &schema)
if err != nil {
resp.Diagnostics.AddError("error adding plan modifiers", err.Error())
return
@@ -149,7 +158,11 @@ func (r *instanceResource) Schema(ctx context.Context, _ resource.SchemaRequest,
resp.Schema = schema
}
-func (r *instanceResource) IdentitySchema(_ context.Context, _ resource.IdentitySchemaRequest, resp *resource.IdentitySchemaResponse) {
+func (r *instanceResource) IdentitySchema(
+ _ context.Context,
+ _ resource.IdentitySchemaRequest,
+ resp *resource.IdentitySchemaResponse,
+) {
resp.IdentitySchema = identityschema.Schema{
Attributes: map[string]identityschema.Attribute{
"project_id": identityschema.StringAttribute{
@@ -171,7 +184,7 @@ func (r *instanceResource) Create(
req resource.CreateRequest,
resp *resource.CreateResponse,
) { // nolint:gocritic // function signature required by Terraform
- var model postgresflexalpha.InstanceModel
+ var model resourceModel
diags := req.Plan.Get(ctx, &model)
resp.Diagnostics.Append(diags...)
@@ -201,7 +214,11 @@ func (r *instanceResource) Create(
payload := modelToCreateInstancePayload(netAcl, model, replVal)
// Create new instance
- createResp, err := r.client.CreateInstanceRequest(ctx, projectId, region).CreateInstanceRequestPayload(payload).Execute()
+ createResp, err := r.client.CreateInstanceRequest(
+ ctx,
+ projectId,
+ region,
+ ).CreateInstanceRequestPayload(payload).Execute()
if err != nil {
core.LogAndAddError(ctx, &resp.Diagnostics, "error creating instance", fmt.Sprintf("Calling API: %v", err))
return
@@ -227,13 +244,23 @@ func (r *instanceResource) Create(
waitResp, err := wait.CreateInstanceWaitHandler(ctx, r.client, projectId, region, instanceId).WaitWithContext(ctx)
if err != nil {
- core.LogAndAddError(ctx, &resp.Diagnostics, "Error creating instance", fmt.Sprintf("Wait handler error: %v", err))
+ core.LogAndAddError(
+ ctx,
+ &resp.Diagnostics,
+ "Error creating instance",
+ fmt.Sprintf("Wait handler error: %v", err),
+ )
return
}
err = mapGetInstanceResponseToModel(ctx, &model, waitResp)
if err != nil {
- core.LogAndAddError(ctx, &resp.Diagnostics, "Error creating instance", fmt.Sprintf("Error creating model: %v", err))
+ core.LogAndAddError(
+ ctx,
+ &resp.Diagnostics,
+ "Error creating instance",
+ fmt.Sprintf("Error creating model: %v", err),
+ )
return
}
@@ -246,7 +273,11 @@ func (r *instanceResource) Create(
tflog.Info(ctx, "Postgres Flex instance created")
}
-func modelToCreateInstancePayload(netAcl []string, model postgresflexalpha.InstanceModel, replVal int32) postgresflex.CreateInstanceRequestPayload {
+func modelToCreateInstancePayload(
+ netAcl []string,
+ model postgresflexalpha.InstanceModel,
+ replVal int32,
+) postgresflex.CreateInstanceRequestPayload {
var enc *postgresflex.InstanceEncryption
if !model.Encryption.IsNull() && !model.Encryption.IsUnknown() {
enc = &postgresflex.InstanceEncryption{
@@ -279,10 +310,14 @@ func modelToCreateInstancePayload(netAcl []string, model postgresflexalpha.Insta
}
// Read refreshes the Terraform state with the latest data.
-func (r *instanceResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) { // nolint:gocritic // function signature required by Terraform
+func (r *instanceResource) Read(
+ ctx context.Context,
+ req resource.ReadRequest,
+ resp *resource.ReadResponse,
+) { // nolint:gocritic // function signature required by Terraform
functionErrorSummary := "read instance failed"
- var model postgresflexalpha.InstanceModel
+ var model resourceModel
diags := req.State.Get(ctx, &model)
resp.Diagnostics.Append(diags...)
if resp.Diagnostics.HasError() {
@@ -371,7 +406,12 @@ func (r *instanceResource) Read(ctx context.Context, req resource.ReadRequest, r
err = mapGetInstanceResponseToModel(ctx, &model, instanceResp)
if err != nil {
- core.LogAndAddError(ctx, &resp.Diagnostics, functionErrorSummary, fmt.Sprintf("Processing API payload: %v", err))
+ core.LogAndAddError(
+ ctx,
+ &resp.Diagnostics,
+ functionErrorSummary,
+ fmt.Sprintf("Processing API payload: %v", err),
+ )
return
}
@@ -396,8 +436,12 @@ func (r *instanceResource) Read(ctx context.Context, req resource.ReadRequest, r
}
// Update updates the resource and sets the updated Terraform state on success.
-func (r *instanceResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) { // nolint:gocritic // function signature required by Terraform
- var model postgresflexalpha.InstanceModel
+func (r *instanceResource) Update(
+ ctx context.Context,
+ req resource.UpdateRequest,
+ resp *resource.UpdateResponse,
+) { // nolint:gocritic // function signature required by Terraform
+ var model resourceModel
diags := req.Plan.Get(ctx, &model)
resp.Diagnostics.Append(diags...)
@@ -475,15 +519,31 @@ func (r *instanceResource) Update(ctx context.Context, req resource.UpdateReques
ctx = core.LogResponse(ctx)
- waitResp, err := wait.PartialUpdateInstanceWaitHandler(ctx, r.client, projectId, region, instanceId).WaitWithContext(ctx)
+ waitResp, err := wait.PartialUpdateInstanceWaitHandler(
+ ctx,
+ r.client,
+ projectId,
+ region,
+ instanceId,
+ ).WaitWithContext(ctx)
if err != nil {
- core.LogAndAddError(ctx, &resp.Diagnostics, "Error updating instance", fmt.Sprintf("Instance update waiting: %v", err))
+ core.LogAndAddError(
+ ctx,
+ &resp.Diagnostics,
+ "Error updating instance",
+ fmt.Sprintf("Instance update waiting: %v", err),
+ )
return
}
err = mapGetInstanceResponseToModel(ctx, &model, waitResp)
if err != nil {
- core.LogAndAddError(ctx, &resp.Diagnostics, "Error updating instance", fmt.Sprintf("Processing API payload: %v", err))
+ core.LogAndAddError(
+ ctx,
+ &resp.Diagnostics,
+ "Error updating instance",
+ fmt.Sprintf("Processing API payload: %v", err),
+ )
return
}
@@ -496,8 +556,12 @@ func (r *instanceResource) Update(ctx context.Context, req resource.UpdateReques
}
// Delete deletes the resource and removes the Terraform state on success.
-func (r *instanceResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) { // nolint:gocritic // function signature required by Terraform
- var model postgresflexalpha.InstanceModel
+func (r *instanceResource) Delete(
+ ctx context.Context,
+ req resource.DeleteRequest,
+ resp *resource.DeleteResponse,
+) { // nolint:gocritic // function signature required by Terraform
+ var model resourceModel
diags := req.State.Get(ctx, &model)
resp.Diagnostics.Append(diags...)
@@ -538,16 +602,24 @@ func (r *instanceResource) Delete(ctx context.Context, req resource.DeleteReques
// ImportState imports a resource into the Terraform state on success.
// The expected format of the resource import identifier is: project_id,region,instance_id
-func (r *instanceResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
+func (r *instanceResource) ImportState(
+ ctx context.Context,
+ req resource.ImportStateRequest,
+ resp *resource.ImportStateResponse,
+) {
ctx = core.InitProviderContext(ctx)
if req.ID != "" {
idParts := strings.Split(req.ID, core.Separator)
if len(idParts) != 3 || idParts[0] == "" || idParts[1] == "" || idParts[2] == "" {
- core.LogAndAddError(ctx, &resp.Diagnostics,
+ core.LogAndAddError(
+ ctx, &resp.Diagnostics,
"Error importing instance",
- fmt.Sprintf("Expected import identifier with format: [project_id],[region],[instance_id] Got: %q", req.ID),
+ fmt.Sprintf(
+ "Expected import identifier with format: [project_id],[region],[instance_id] Got: %q",
+ req.ID,
+ ),
)
return
}
@@ -558,25 +630,20 @@ func (r *instanceResource) ImportState(ctx context.Context, req resource.ImportS
return
}
+ // If no ID is provided, attempt to read identity attributes from the import configuration
var identityData InstanceResourceIdentityModel
resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...)
if resp.Diagnostics.HasError() {
return
}
- resp.Diagnostics.Append(
- resp.State.SetAttribute(
- ctx,
- path.Root("id"),
- utils.BuildInternalTerraformId(
- identityData.ProjectID.ValueString(),
- identityData.Region.ValueString(),
- identityData.InstanceID.ValueString(),
- ),
- )...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), identityData.ProjectID.ValueString())...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), identityData.Region.ValueString())...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), identityData.InstanceID.ValueString())...)
+ projectId := identityData.ProjectID.ValueString()
+ region := identityData.Region.ValueString()
+ instanceId := identityData.InstanceID.ValueString()
+
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), projectId)...)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), region)...)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), instanceId)...)
tflog.Info(ctx, "Postgres Flex instance state imported")
}
diff --git a/stackit/internal/services/postgresflexalpha/postgresflex_acc_test.go b/stackit/internal/services/postgresflexalpha/postgresflex_acc_test.go
index 2a8a12b5..fbc442e4 100644
--- a/stackit/internal/services/postgresflexalpha/postgresflex_acc_test.go
+++ b/stackit/internal/services/postgresflexalpha/postgresflex_acc_test.go
@@ -164,6 +164,11 @@ func TestAccPostgresFlexFlexResource(t *testing.T) {
Steps: []resource.TestStep{
// Creation
{
+ // Alternative: load the step config from testdata/<TestName>/
+ // ConfigDirectory: config.TestNameDirectory(),
+
+ // Alternative: load the step config from testdata/<TestName>/<step_number>/
+ // ConfigDirectory: config.TestStepDirectory(),
Config: configResources(instanceResource["backup_schedule"], &testutil.Region),
Check: resource.ComposeAggregateTestCheckFunc(
// Instance
diff --git a/stackit/internal/services/postgresflexalpha/user/datasource.go b/stackit/internal/services/postgresflexalpha/user/datasource.go
index 70d05aba..b0cf9d3b 100644
--- a/stackit/internal/services/postgresflexalpha/user/datasource.go
+++ b/stackit/internal/services/postgresflexalpha/user/datasource.go
@@ -5,22 +5,19 @@ import (
"fmt"
"math"
"net/http"
- "strconv"
+ "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
+ "github.com/hashicorp/terraform-plugin-framework/diag"
postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
+ postgresflexalpha "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/user/datasources_gen"
postgresflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/utils"
- "github.com/hashicorp/terraform-plugin-framework/attr"
"github.com/hashicorp/terraform-plugin-framework/datasource"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-log/tflog"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/validate"
-
- "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/types"
)
// Ensure the implementation satisfies the expected interfaces.
@@ -28,25 +25,17 @@ var (
_ datasource.DataSource = &userDataSource{}
)
-type DataSourceModel struct {
- Id types.String `tfsdk:"id"` // needed by TF
- UserId types.Int64 `tfsdk:"user_id"`
- InstanceId types.String `tfsdk:"instance_id"`
- ProjectId types.String `tfsdk:"project_id"`
- Username types.String `tfsdk:"username"`
- Roles types.Set `tfsdk:"roles"`
- Host types.String `tfsdk:"host"`
- Port types.Int64 `tfsdk:"port"`
- Region types.String `tfsdk:"region"`
- Status types.String `tfsdk:"status"`
- ConnectionString types.String `tfsdk:"connection_string"`
-}
-
// NewUserDataSource is a helper function to simplify the provider implementation.
func NewUserDataSource() datasource.DataSource {
return &userDataSource{}
}
+// dataSourceModel maps the data source schema data.
+type dataSourceModel struct {
+ postgresflexalpha.UserModel
+ TerraformID types.String `tfsdk:"id"`
+}
+
// userDataSource is the data source implementation.
type userDataSource struct {
client *postgresflex.APIClient
@@ -83,84 +72,16 @@ func (r *userDataSource) Configure(
}
// Schema defines the schema for the data source.
-func (r *userDataSource) Schema(_ context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) {
- descriptions := map[string]string{
- "main": "Postgres Flex user data source schema. Must have a `region` specified in the provider configuration.",
- "id": "Terraform's internal data source. ID. It is structured as \"`project_id`,`region`,`instance_id`,`user_id`\".",
- "user_id": "User ID.",
- "instance_id": "ID of the PostgresFlex instance.",
- "project_id": "STACKIT project ID to which the instance is associated.",
- "username": "The name of the user.",
- "roles": "The roles assigned to the user.",
- "host": "The host address for the user to connect to the instance.",
- "port": "The port number for the user to connect to the instance.",
- "region": "The resource region. If not defined, the provider region is used.",
- "status": "The current status of the user.",
- "connection_string": "The connection string for the user to the instance.",
+func (r *userDataSource) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) {
+ s := postgresflexalpha.UserDataSourceSchema(ctx)
+ s.Attributes["id"] = schema.StringAttribute{
+ Description: "Terraform's internal resource ID. It is structured as \\\"`project_id`,`region`,`instance_id`," +
+ "`user_id`\\\".\",",
+ Optional: true,
+ Computed: true,
}
- resp.Schema = schema.Schema{
- Description: descriptions["main"],
- Attributes: map[string]schema.Attribute{
- "id": schema.StringAttribute{
- Description: descriptions["id"],
- Computed: true,
- },
- "user_id": schema.StringAttribute{
- Description: descriptions["user_id"],
- Required: true,
- Validators: []validator.String{
- validate.NoSeparator(),
- },
- },
- "instance_id": schema.StringAttribute{
- Description: descriptions["instance_id"],
- Required: true,
- Validators: []validator.String{
- validate.UUID(),
- validate.NoSeparator(),
- },
- },
- "project_id": schema.StringAttribute{
- Description: descriptions["project_id"],
- Required: true,
- Validators: []validator.String{
- validate.UUID(),
- validate.NoSeparator(),
- },
- },
- "username": schema.StringAttribute{
- Description: descriptions["username"],
- Computed: true,
- },
- "roles": schema.SetAttribute{
- Description: descriptions["roles"],
- ElementType: types.StringType,
- Computed: true,
- },
- "host": schema.StringAttribute{
- Description: descriptions["host"],
- Computed: true,
- },
- "port": schema.Int64Attribute{
- Description: descriptions["port"],
- Computed: true,
- },
- "region": schema.StringAttribute{
- // the region cannot be found automatically, so it has to be passed
- Optional: true,
- Description: descriptions["region"],
- },
- "status": schema.StringAttribute{
- Description: descriptions["status"],
- Computed: true,
- },
- "connection_string": schema.StringAttribute{
- Description: descriptions["connection_string"],
- Computed: true,
- },
- },
- }
+ resp.Schema = s
}
// Read refreshes the Terraform state with the latest data.
@@ -169,7 +90,7 @@ func (r *userDataSource) Read(
req datasource.ReadRequest,
resp *datasource.ReadResponse,
) { // nolint:gocritic // function signature required by Terraform
- var model DataSourceModel
+ var model dataSourceModel
diags := req.Config.Get(ctx, &model)
resp.Diagnostics.Append(diags...)
if resp.Diagnostics.HasError() {
@@ -190,26 +111,12 @@ func (r *userDataSource) Read(
region := r.providerData.GetRegionWithOverride(model.Region)
ctx = tflog.SetField(ctx, "project_id", projectId)
ctx = tflog.SetField(ctx, "instance_id", instanceId)
- ctx = tflog.SetField(ctx, "user_id", userId)
ctx = tflog.SetField(ctx, "region", region)
+ ctx = tflog.SetField(ctx, "user_id", userId)
recordSetResp, err := r.client.GetUserRequest(ctx, projectId, region, instanceId, userId).Execute()
if err != nil {
- utils.LogError(
- ctx,
- &resp.Diagnostics,
- err,
- "Reading user",
- fmt.Sprintf(
- "User with ID %q or instance with ID %q does not exist in project %q.",
- userId,
- instanceId,
- projectId,
- ),
- map[int]string{
- http.StatusForbidden: fmt.Sprintf("Project with ID %q not found or forbidden access", projectId),
- },
- )
+ handleReadError(ctx, &resp.Diagnostics, err, projectId, instanceId, userId)
resp.State.RemoveResource(ctx)
return
}
@@ -237,47 +144,38 @@ func (r *userDataSource) Read(
tflog.Info(ctx, "Postgres Flex user read")
}
-func mapDataSourceFields(userResp *postgresflex.GetUserResponse, model *DataSourceModel, region string) error {
- if userResp == nil {
- return fmt.Errorf("response is nil")
- }
- if model == nil {
- return fmt.Errorf("model input is nil")
- }
- user := userResp
-
- var userId int64
- if model.UserId.ValueInt64() != 0 {
- userId = model.UserId.ValueInt64()
- } else if user.Id != nil {
- userId = *user.Id
- } else {
- return fmt.Errorf("user id not present")
- }
-
- model.Id = utils.BuildInternalTerraformId(
- model.ProjectId.ValueString(), region, model.InstanceId.ValueString(), strconv.FormatInt(userId, 10),
+// handleReadError centralizes API error handling for the Read operation.
+func handleReadError(
+ ctx context.Context,
+ diags *diag.Diagnostics,
+ err error,
+ projectId, instanceId string,
+ userId int32,
+) {
+ utils.LogError(
+ ctx,
+ diags,
+ err,
+ "Reading user",
+ fmt.Sprintf(
+ "User with ID %d or instance with ID %q does not exist in project %q.",
+ userId,
+ instanceId,
+ projectId,
+ ),
+ map[int]string{
+ http.StatusBadRequest: fmt.Sprintf(
+ "Invalid user request parameters for project %q and instance %q.",
+ projectId,
+ instanceId,
+ ),
+ http.StatusNotFound: fmt.Sprintf(
+ "User %d, instance %q, or project %q not found.",
+ userId,
+ instanceId,
+ projectId,
+ ),
+ http.StatusForbidden: fmt.Sprintf("Forbidden access to project %q.", projectId),
+ },
)
- model.UserId = types.Int64Value(userId)
- model.Username = types.StringPointerValue(user.Name)
-
- if user.Roles == nil {
- model.Roles = types.SetNull(types.StringType)
- } else {
- var roles []attr.Value
- for _, role := range *user.Roles {
- roles = append(roles, types.StringValue(string(role)))
- }
- rolesSet, diags := types.SetValue(types.StringType, roles)
- if diags.HasError() {
- return fmt.Errorf("failed to map roles: %w", core.DiagsToError(diags))
- }
- model.Roles = rolesSet
- }
- model.Host = types.StringPointerValue(user.Host)
- model.Port = types.Int64PointerValue(user.Port)
- model.Region = types.StringValue(region)
- model.Status = types.StringPointerValue(user.Status)
- model.ConnectionString = types.StringPointerValue(user.ConnectionString)
- return nil
}
diff --git a/stackit/internal/services/postgresflexalpha/user/datasource_test.go b/stackit/internal/services/postgresflexalpha/user/datasource_test.go
deleted file mode 100644
index 679bef85..00000000
--- a/stackit/internal/services/postgresflexalpha/user/datasource_test.go
+++ /dev/null
@@ -1,146 +0,0 @@
-package postgresflexalpha
-
-import (
- "testing"
-
- "github.com/google/go-cmp/cmp"
- "github.com/hashicorp/terraform-plugin-framework/attr"
- "github.com/hashicorp/terraform-plugin-framework/types"
- "github.com/stackitcloud/stackit-sdk-go/core/utils"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
-)
-
-func TestMapDataSourceFields(t *testing.T) {
- const testRegion = "region"
- tests := []struct {
- description string
- input *postgresflexalpha.GetUserResponse
- region string
- expected DataSourceModel
- isValid bool
- }{
- {
- "default_values",
- &postgresflexalpha.GetUserResponse{},
- testRegion,
- DataSourceModel{
- Id: types.StringValue("pid,region,iid,1"),
- UserId: types.Int64Value(1),
- InstanceId: types.StringValue("iid"),
- ProjectId: types.StringValue("pid"),
- Username: types.StringNull(),
- Roles: types.SetNull(types.StringType),
- Host: types.StringNull(),
- Port: types.Int64Null(),
- Region: types.StringValue(testRegion),
- },
- true,
- },
- {
- "simple_values",
- &postgresflexalpha.GetUserResponse{
- Roles: &[]postgresflexalpha.UserRole{
- "role_1",
- "role_2",
- "",
- },
- Name: utils.Ptr("username"),
- Host: utils.Ptr("host"),
- Port: utils.Ptr(int64(1234)),
- },
- testRegion,
- DataSourceModel{
- Id: types.StringValue("pid,region,iid,1"),
- UserId: types.Int64Value(1),
- InstanceId: types.StringValue("iid"),
- ProjectId: types.StringValue("pid"),
- Username: types.StringValue("username"),
- Roles: types.SetValueMust(
- types.StringType, []attr.Value{
- types.StringValue("role_1"),
- types.StringValue("role_2"),
- types.StringValue(""),
- },
- ),
- Host: types.StringValue("host"),
- Port: types.Int64Value(1234),
- Region: types.StringValue(testRegion),
- Status: types.StringNull(),
- ConnectionString: types.StringNull(),
- },
- true,
- },
- {
- "null_fields_and_int_conversions",
- &postgresflexalpha.GetUserResponse{
- Id: utils.Ptr(int64(1)),
- Roles: &[]postgresflexalpha.UserRole{},
- Name: nil,
- Host: nil,
- Port: utils.Ptr(int64(2123456789)),
- Status: utils.Ptr("status"),
- ConnectionString: utils.Ptr("connection_string"),
- },
- testRegion,
- DataSourceModel{
- Id: types.StringValue("pid,region,iid,1"),
- UserId: types.Int64Value(1),
- InstanceId: types.StringValue("iid"),
- ProjectId: types.StringValue("pid"),
- Username: types.StringNull(),
- Roles: types.SetValueMust(types.StringType, []attr.Value{}),
- Host: types.StringNull(),
- Port: types.Int64Value(2123456789),
- Region: types.StringValue(testRegion),
- Status: types.StringValue("status"),
- ConnectionString: types.StringValue("connection_string"),
- },
- true,
- },
- {
- "nil_response",
- nil,
- testRegion,
- DataSourceModel{},
- false,
- },
- {
- "nil_response_2",
- &postgresflexalpha.GetUserResponse{},
- testRegion,
- DataSourceModel{},
- false,
- },
- {
- "no_resource_id",
- &postgresflexalpha.GetUserResponse{},
- testRegion,
- DataSourceModel{},
- false,
- },
- }
- for _, tt := range tests {
- t.Run(
- tt.description, func(t *testing.T) {
- state := &DataSourceModel{
- ProjectId: tt.expected.ProjectId,
- InstanceId: tt.expected.InstanceId,
- UserId: tt.expected.UserId,
- }
- err := mapDataSourceFields(tt.input, state, tt.region)
- if !tt.isValid && err == nil {
- t.Fatalf("Should have failed")
- }
- if tt.isValid && err != nil {
- t.Fatalf("Should not have failed: %v", err)
- }
- if tt.isValid {
- diff := cmp.Diff(state, &tt.expected)
- if diff != "" {
- t.Fatalf("Data does not match: %s", diff)
- }
- }
- },
- )
- }
-}
diff --git a/stackit/internal/services/postgresflexalpha/user/datasources_gen/user_data_source_gen.go b/stackit/internal/services/postgresflexalpha/user/datasources_gen/user_data_source_gen.go
index fb2a7644..29a7cca0 100644
--- a/stackit/internal/services/postgresflexalpha/user/datasources_gen/user_data_source_gen.go
+++ b/stackit/internal/services/postgresflexalpha/user/datasources_gen/user_data_source_gen.go
@@ -14,17 +14,7 @@ import (
func UserDataSourceSchema(ctx context.Context) schema.Schema {
return schema.Schema{
Attributes: map[string]schema.Attribute{
- "connection_string": schema.StringAttribute{
- Computed: true,
- Description: "The connection string for the user to the instance.",
- MarkdownDescription: "The connection string for the user to the instance.",
- },
- "host": schema.StringAttribute{
- Computed: true,
- Description: "The host of the instance in which the user belongs to.",
- MarkdownDescription: "The host of the instance in which the user belongs to.",
- },
- "id": schema.Int64Attribute{
+ "tf_original_api_id": schema.Int64Attribute{
Computed: true,
Description: "The ID of the user.",
MarkdownDescription: "The ID of the user.",
@@ -39,11 +29,6 @@ func UserDataSourceSchema(ctx context.Context) schema.Schema {
Description: "The name of the user.",
MarkdownDescription: "The name of the user.",
},
- "port": schema.Int64Attribute{
- Computed: true,
- Description: "The port of the instance in which the user belongs to.",
- MarkdownDescription: "The port of the instance in which the user belongs to.",
- },
"project_id": schema.StringAttribute{
Required: true,
Description: "The STACKIT project ID.",
@@ -80,15 +65,12 @@ func UserDataSourceSchema(ctx context.Context) schema.Schema {
}
type UserModel struct {
- ConnectionString types.String `tfsdk:"connection_string"`
- Host types.String `tfsdk:"host"`
- Id types.Int64 `tfsdk:"id"`
- InstanceId types.String `tfsdk:"instance_id"`
- Name types.String `tfsdk:"name"`
- Port types.Int64 `tfsdk:"port"`
- ProjectId types.String `tfsdk:"project_id"`
- Region types.String `tfsdk:"region"`
- Roles types.List `tfsdk:"roles"`
- Status types.String `tfsdk:"status"`
- UserId types.Int64 `tfsdk:"user_id"`
+ Id types.Int64 `tfsdk:"tf_original_api_id"`
+ InstanceId types.String `tfsdk:"instance_id"`
+ Name types.String `tfsdk:"name"`
+ ProjectId types.String `tfsdk:"project_id"`
+ Region types.String `tfsdk:"region"`
+ Roles types.List `tfsdk:"roles"`
+ Status types.String `tfsdk:"status"`
+ UserId types.Int64 `tfsdk:"user_id"`
}
diff --git a/stackit/internal/services/postgresflexalpha/user/mapper.go b/stackit/internal/services/postgresflexalpha/user/mapper.go
new file mode 100644
index 00000000..2445cb16
--- /dev/null
+++ b/stackit/internal/services/postgresflexalpha/user/mapper.go
@@ -0,0 +1,142 @@
+package postgresflexalpha
+
+import (
+ "fmt"
+ "strconv"
+
+ "github.com/hashicorp/terraform-plugin-framework/attr"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+ postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
+)
+
+// mapDataSourceFields maps API response to data source model, preserving existing ID.
+func mapDataSourceFields(userResp *postgresflex.GetUserResponse, model *dataSourceModel, region string) error {
+ if userResp == nil {
+ return fmt.Errorf("response is nil")
+ }
+ if model == nil {
+ return fmt.Errorf("model input is nil")
+ }
+ user := userResp
+
+ var userId int64
+ if model.UserId.ValueInt64() != 0 {
+ userId = model.UserId.ValueInt64()
+ } else if user.Id != nil {
+ userId = *user.Id
+ } else {
+ return fmt.Errorf("user id not present")
+ }
+
+ model.TerraformID = utils.BuildInternalTerraformId(
+ model.ProjectId.ValueString(), region, model.InstanceId.ValueString(), strconv.FormatInt(userId, 10),
+ )
+
+ model.UserId = types.Int64Value(userId)
+ model.Name = types.StringValue(user.GetName())
+
+ if user.Roles == nil {
+ model.Roles = types.List(types.SetNull(types.StringType))
+ } else {
+ var roles []attr.Value
+ for _, role := range *user.Roles {
+ roles = append(roles, types.StringValue(string(role)))
+ }
+ rolesSet, diags := types.SetValue(types.StringType, roles)
+ if diags.HasError() {
+ return fmt.Errorf("failed to map roles: %w", core.DiagsToError(diags))
+ }
+ model.Roles = types.List(rolesSet)
+ }
+
+ model.Id = types.Int64Value(userId)
+ model.Region = types.StringValue(region)
+ model.Status = types.StringValue(user.GetStatus())
+ return nil
+}
+
+// toPayloadRoles converts a string slice to the API's role type.
+func toPayloadRoles(roles *[]string) *[]postgresflex.UserRole {
+ var userRoles = make([]postgresflex.UserRole, 0, len(*roles))
+ for _, role := range *roles {
+ userRoles = append(userRoles, postgresflex.UserRole(role))
+ }
+ return &userRoles
+}
+
+// toUpdatePayload creates an API update payload from the resource model.
+func toUpdatePayload(model *resourceModel, roles *[]string) (
+ *postgresflex.UpdateUserRequestPayload,
+ error,
+) {
+ if model == nil {
+ return nil, fmt.Errorf("nil model")
+ }
+ if roles == nil {
+ return nil, fmt.Errorf("nil roles")
+ }
+
+ return &postgresflex.UpdateUserRequestPayload{
+ Name: conversion.StringValueToPointer(model.Name),
+ Roles: toPayloadRoles(roles),
+ }, nil
+}
+
+// toCreatePayload creates an API create payload from the resource model.
+func toCreatePayload(model *resourceModel, roles *[]string) (*postgresflex.CreateUserRequestPayload, error) {
+ if model == nil {
+ return nil, fmt.Errorf("nil model")
+ }
+ if roles == nil {
+ return nil, fmt.Errorf("nil roles")
+ }
+
+ return &postgresflex.CreateUserRequestPayload{
+ Roles: toPayloadRoles(roles),
+ Name: conversion.StringValueToPointer(model.Name),
+ }, nil
+}
+
+// mapResourceFields maps API response to the resource model, preserving existing ID.
+func mapResourceFields(userResp *postgresflex.GetUserResponse, model *resourceModel, region string) error {
+ if userResp == nil {
+ return fmt.Errorf("response is nil")
+ }
+ if model == nil {
+ return fmt.Errorf("model input is nil")
+ }
+ user := userResp
+
+ var userId int64
+ if model.UserId.ValueInt64() != 0 {
+ userId = model.UserId.ValueInt64()
+ } else if user.Id != nil {
+ userId = *user.Id
+ } else {
+ return fmt.Errorf("user id not present")
+ }
+
+ model.Id = types.Int64Value(userId)
+ model.UserId = types.Int64Value(userId)
+ model.Name = types.StringPointerValue(user.Name)
+
+ if user.Roles == nil {
+ model.Roles = types.List(types.SetNull(types.StringType))
+ } else {
+ var roles []attr.Value
+ for _, role := range *user.Roles {
+ roles = append(roles, types.StringValue(string(role)))
+ }
+ rolesSet, diags := types.SetValue(types.StringType, roles)
+ if diags.HasError() {
+ return fmt.Errorf("failed to map roles: %w", core.DiagsToError(diags))
+ }
+ model.Roles = types.List(rolesSet)
+ }
+ model.Region = types.StringValue(region)
+ model.Status = types.StringPointerValue(user.Status)
+ return nil
+}
diff --git a/stackit/internal/services/postgresflexalpha/user/mapper_test.go b/stackit/internal/services/postgresflexalpha/user/mapper_test.go
new file mode 100644
index 00000000..6eeff9f0
--- /dev/null
+++ b/stackit/internal/services/postgresflexalpha/user/mapper_test.go
@@ -0,0 +1,569 @@
+package postgresflexalpha
+
+import (
+ "testing"
+
+ "github.com/google/go-cmp/cmp"
+ "github.com/hashicorp/terraform-plugin-framework/attr"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+ "github.com/stackitcloud/stackit-sdk-go/core/utils"
+ postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
+ data "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/user/datasources_gen"
+)
+
+func TestMapDataSourceFields(t *testing.T) {
+ const testRegion = "region"
+ tests := []struct {
+ description string
+ input *postgresflex.GetUserResponse
+ region string
+ expected dataSourceModel
+ isValid bool
+ }{
+ {
+ "default_values",
+ &postgresflex.GetUserResponse{},
+ testRegion,
+ dataSourceModel{
+ UserModel: data.UserModel{
+ Id: types.Int64Value(1),
+ UserId: types.Int64Value(1),
+ InstanceId: types.StringValue("iid"),
+ ProjectId: types.StringValue("pid"),
+ Name: types.StringValue(""),
+ Roles: types.List(types.SetNull(types.StringType)),
+ Status: types.StringValue(""),
+ Region: types.StringValue(testRegion),
+ },
+ TerraformID: types.StringValue("pid,region,iid,1"),
+ },
+ true,
+ },
+ {
+ "simple_values",
+ &postgresflex.GetUserResponse{
+ Roles: &[]postgresflex.UserRole{
+ "role_1",
+ "role_2",
+ "",
+ },
+ Name: utils.Ptr("username"),
+ },
+ testRegion,
+ dataSourceModel{
+
+ UserModel: data.UserModel{
+ Id: types.Int64Value(1),
+ UserId: types.Int64Value(1),
+ InstanceId: types.StringValue("iid"),
+ ProjectId: types.StringValue("pid"),
+ Name: types.StringValue("username"),
+ Roles: types.List(
+ types.SetValueMust(
+ types.StringType, []attr.Value{
+ types.StringValue("role_1"),
+ types.StringValue("role_2"),
+ types.StringValue(""),
+ },
+ ),
+ ),
+ Region: types.StringValue(testRegion),
+ Status: types.StringValue(""),
+ },
+ TerraformID: types.StringValue("pid,region,iid,1"),
+ },
+ true,
+ },
+ {
+ "null_fields_and_int_conversions",
+ &postgresflex.GetUserResponse{
+ Id: utils.Ptr(int64(1)),
+ Roles: &[]postgresflex.UserRole{},
+ Name: nil,
+ Status: utils.Ptr("status"),
+ },
+ testRegion,
+ dataSourceModel{
+ UserModel: data.UserModel{
+ Id: types.Int64Value(1),
+ UserId: types.Int64Value(1),
+ InstanceId: types.StringValue("iid"),
+ ProjectId: types.StringValue("pid"),
+ Name: types.StringValue(""),
+ Roles: types.List(types.SetValueMust(types.StringType, []attr.Value{})),
+ Region: types.StringValue(testRegion),
+ Status: types.StringValue("status"),
+ },
+ TerraformID: types.StringValue("pid,region,iid,1"),
+ },
+ true,
+ },
+ {
+ "nil_response",
+ nil,
+ testRegion,
+ dataSourceModel{},
+ false,
+ },
+ {
+ "nil_response_2",
+ &postgresflex.GetUserResponse{},
+ testRegion,
+ dataSourceModel{},
+ false,
+ },
+ {
+ "no_resource_id",
+ &postgresflex.GetUserResponse{},
+ testRegion,
+ dataSourceModel{},
+ false,
+ },
+ }
+ for _, tt := range tests {
+ t.Run(
+ tt.description, func(t *testing.T) {
+ state := &dataSourceModel{
+ UserModel: data.UserModel{
+ ProjectId: tt.expected.ProjectId,
+ InstanceId: tt.expected.InstanceId,
+ UserId: tt.expected.UserId,
+ },
+ }
+ err := mapDataSourceFields(tt.input, state, tt.region)
+ if !tt.isValid && err == nil {
+ t.Fatalf("Should have failed")
+ }
+ if tt.isValid && err != nil {
+ t.Fatalf("Should not have failed: %v", err)
+ }
+ if tt.isValid {
+ diff := cmp.Diff(state, &tt.expected)
+ if diff != "" {
+ t.Fatalf("Data does not match: %s", diff)
+ }
+ }
+ },
+ )
+ }
+}
+
+func TestMapFieldsCreate(t *testing.T) {
+ const testRegion = "region"
+ tests := []struct {
+ description string
+ input *postgresflex.GetUserResponse
+ region string
+ expected resourceModel
+ isValid bool
+ }{
+ {
+ "default_values",
+ &postgresflex.GetUserResponse{
+ Id: utils.Ptr(int64(1)),
+ },
+ testRegion,
+ resourceModel{
+ Id: types.Int64Value(1), UserId: types.Int64Value(1),
+ InstanceId: types.StringValue("iid"),
+ ProjectId: types.StringValue("pid"),
+ Name: types.StringNull(),
+ Roles: types.List(types.SetNull(types.StringType)),
+ Password: types.StringNull(),
+ Region: types.StringValue(testRegion),
+ Status: types.StringNull(),
+ ConnectionString: types.StringNull(),
+ },
+ true,
+ },
+ {
+ "simple_values",
+ &postgresflex.GetUserResponse{
+ Id: utils.Ptr(int64(1)),
+ Name: utils.Ptr("username"),
+ Status: utils.Ptr("status"),
+ },
+ testRegion,
+ resourceModel{
+ Id: types.Int64Value(1), UserId: types.Int64Value(1),
+ InstanceId: types.StringValue("iid"),
+ ProjectId: types.StringValue("pid"),
+ Name: types.StringValue("username"),
+ Roles: types.List(types.SetNull(types.StringType)),
+ Password: types.StringNull(),
+ Region: types.StringValue(testRegion),
+ Status: types.StringValue("status"),
+ ConnectionString: types.StringNull(),
+ },
+ true,
+ },
+ {
+ "null_fields_and_int_conversions",
+ &postgresflex.GetUserResponse{
+ Id: utils.Ptr(int64(1)),
+ Name: nil,
+ Status: nil,
+ },
+ testRegion,
+ resourceModel{
+ Id: types.Int64Value(1), UserId: types.Int64Value(1),
+ InstanceId: types.StringValue("iid"),
+ ProjectId: types.StringValue("pid"),
+ Name: types.StringNull(),
+ Roles: types.List(types.SetNull(types.StringType)),
+ Password: types.StringNull(),
+ Region: types.StringValue(testRegion),
+ Status: types.StringNull(),
+ ConnectionString: types.StringNull(),
+ },
+ true,
+ },
+ {
+ "nil_response",
+ nil,
+ testRegion,
+ resourceModel{},
+ false,
+ },
+ {
+ "nil_response_2",
+ &postgresflex.GetUserResponse{},
+ testRegion,
+ resourceModel{},
+ false,
+ },
+ {
+ "no_resource_id",
+ &postgresflex.GetUserResponse{},
+ testRegion,
+ resourceModel{},
+ false,
+ },
+ }
+ for _, tt := range tests {
+ t.Run(
+ tt.description, func(t *testing.T) {
+ state := &resourceModel{
+ ProjectId: tt.expected.ProjectId,
+ InstanceId: tt.expected.InstanceId,
+ }
+
+ err := mapResourceFields(tt.input, state, tt.region)
+ if !tt.isValid && err == nil {
+ t.Fatalf("Should have failed")
+ }
+ if tt.isValid && err != nil {
+ t.Fatalf("Should not have failed: %v", err)
+ }
+ if tt.isValid {
+ diff := cmp.Diff(state, &tt.expected)
+ if diff != "" {
+ t.Fatalf("Data does not match: %s", diff)
+ }
+ }
+ },
+ )
+ }
+}
+
+func TestMapFields(t *testing.T) {
+ const testRegion = "region"
+ tests := []struct {
+ description string
+ input *postgresflex.GetUserResponse
+ region string
+ expected resourceModel
+ isValid bool
+ }{
+ {
+ "default_values",
+ &postgresflex.GetUserResponse{
+ Id: utils.Ptr(int64(1)),
+ },
+ testRegion,
+ resourceModel{
+ Id: types.Int64Value(1),
+ UserId: types.Int64Value(int64(1)),
+ InstanceId: types.StringValue("iid"),
+ ProjectId: types.StringValue("pid"),
+ Name: types.StringNull(),
+ Roles: types.List(types.SetNull(types.StringType)),
+ Region: types.StringValue(testRegion),
+ Status: types.StringNull(),
+ ConnectionString: types.StringNull(),
+ },
+ true,
+ },
+ {
+ "simple_values",
+ &postgresflex.GetUserResponse{
+ Id: utils.Ptr(int64(1)),
+ Roles: &[]postgresflex.UserRole{
+ "role_1",
+ "role_2",
+ "",
+ },
+ Name: utils.Ptr("username"),
+ },
+ testRegion,
+ resourceModel{
+ Id: types.Int64Value(1),
+ UserId: types.Int64Value(1),
+ InstanceId: types.StringValue("iid"),
+ ProjectId: types.StringValue("pid"),
+ Name: types.StringValue("username"),
+ Roles: types.List(
+ types.SetValueMust(
+ types.StringType, []attr.Value{
+ types.StringValue("role_1"),
+ types.StringValue("role_2"),
+ types.StringValue(""),
+ },
+ ),
+ ),
+ Region: types.StringValue(testRegion),
+ Status: types.StringNull(),
+ ConnectionString: types.StringNull(),
+ },
+ true,
+ },
+ {
+ "null_fields_and_int_conversions",
+ &postgresflex.GetUserResponse{
+ Id: utils.Ptr(int64(1)),
+ Name: nil,
+ },
+ testRegion,
+ resourceModel{
+ Id: types.Int64Value(1),
+ UserId: types.Int64Value(1),
+ InstanceId: types.StringValue("iid"),
+ ProjectId: types.StringValue("pid"),
+ Name: types.StringNull(),
+ Roles: types.List(types.SetNull(types.StringType)),
+ Region: types.StringValue(testRegion),
+ Status: types.StringNull(),
+ ConnectionString: types.StringNull(),
+ },
+ true,
+ },
+ {
+ "nil_response",
+ nil,
+ testRegion,
+ resourceModel{},
+ false,
+ },
+ {
+ "nil_response_2",
+ &postgresflex.GetUserResponse{},
+ testRegion,
+ resourceModel{},
+ false,
+ },
+ {
+ "no_resource_id",
+ &postgresflex.GetUserResponse{},
+ testRegion,
+ resourceModel{},
+ false,
+ },
+ }
+ for _, tt := range tests {
+ t.Run(
+ tt.description, func(t *testing.T) {
+ state := &resourceModel{
+ ProjectId: tt.expected.ProjectId,
+ InstanceId: tt.expected.InstanceId,
+ }
+ err := mapResourceFields(tt.input, state, tt.region)
+ if !tt.isValid && err == nil {
+ t.Fatalf("Should have failed")
+ }
+ if tt.isValid && err != nil {
+ t.Fatalf("Should not have failed: %v", err)
+ }
+ if tt.isValid {
+ diff := cmp.Diff(state, &tt.expected)
+ if diff != "" {
+ t.Fatalf("Data does not match: %s", diff)
+ }
+ }
+ },
+ )
+ }
+}
+
+func TestToCreatePayload(t *testing.T) {
+ tests := []struct {
+ description string
+ input *resourceModel
+ inputRoles *[]string
+ expected *postgresflex.CreateUserRequestPayload
+ isValid bool
+ }{
+ {
+ "default_values",
+ &resourceModel{},
+ &[]string{},
+ &postgresflex.CreateUserRequestPayload{
+ Name: nil,
+ Roles: &[]postgresflex.UserRole{},
+ },
+ true,
+ },
+ {
+ "simple_values",
+ &resourceModel{
+ Name: types.StringValue("username"),
+ },
+ &[]string{
+ "role_1",
+ "role_2",
+ },
+ &postgresflex.CreateUserRequestPayload{
+ Name: utils.Ptr("username"),
+ Roles: &[]postgresflex.UserRole{
+ "role_1",
+ "role_2",
+ },
+ },
+ true,
+ },
+ {
+ "null_fields_and_int_conversions",
+ &resourceModel{
+ Name: types.StringNull(),
+ },
+ &[]string{
+ "",
+ },
+ &postgresflex.CreateUserRequestPayload{
+ Roles: &[]postgresflex.UserRole{
+ "",
+ },
+ Name: nil,
+ },
+ true,
+ },
+ {
+ "nil_model",
+ nil,
+ &[]string{},
+ nil,
+ false,
+ },
+ {
+ "nil_roles",
+ &resourceModel{},
+ nil,
+ nil,
+ false,
+ },
+ }
+ for _, tt := range tests {
+ t.Run(
+ tt.description, func(t *testing.T) {
+ output, err := toCreatePayload(tt.input, tt.inputRoles)
+ if !tt.isValid && err == nil {
+ t.Fatalf("Should have failed")
+ }
+ if tt.isValid && err != nil {
+ t.Fatalf("Should not have failed: %v", err)
+ }
+ if tt.isValid {
+ diff := cmp.Diff(output, tt.expected)
+ if diff != "" {
+ t.Fatalf("Data does not match: %s", diff)
+ }
+ }
+ },
+ )
+ }
+}
+
+func TestToUpdatePayload(t *testing.T) {
+ tests := []struct {
+ description string
+ input *resourceModel
+ inputRoles *[]string
+ expected *postgresflex.UpdateUserRequestPayload
+ isValid bool
+ }{
+ {
+ "default_values",
+ &resourceModel{},
+ &[]string{},
+ &postgresflex.UpdateUserRequestPayload{
+ Roles: &[]postgresflex.UserRole{},
+ },
+ true,
+ },
+ {
+ "default_values",
+ &resourceModel{
+ Name: types.StringValue("username"),
+ },
+ &[]string{
+ "role_1",
+ "role_2",
+ },
+ &postgresflex.UpdateUserRequestPayload{
+ Name: utils.Ptr("username"),
+ Roles: &[]postgresflex.UserRole{
+ "role_1",
+ "role_2",
+ },
+ },
+ true,
+ },
+ {
+ "null_fields_and_int_conversions",
+ &resourceModel{
+ Name: types.StringNull(),
+ },
+ &[]string{
+ "",
+ },
+ &postgresflex.UpdateUserRequestPayload{
+ Roles: &[]postgresflex.UserRole{
+ "",
+ },
+ },
+ true,
+ },
+ {
+ "nil_model",
+ nil,
+ &[]string{},
+ nil,
+ false,
+ },
+ {
+ "nil_roles",
+ &resourceModel{},
+ nil,
+ nil,
+ false,
+ },
+ }
+ for _, tt := range tests {
+ t.Run(
+ tt.description, func(t *testing.T) {
+ output, err := toUpdatePayload(tt.input, tt.inputRoles)
+ if !tt.isValid && err == nil {
+ t.Fatalf("Should have failed")
+ }
+ if tt.isValid && err != nil {
+ t.Fatalf("Should not have failed: %v", err)
+ }
+ if tt.isValid {
+ diff := cmp.Diff(output, tt.expected)
+ if diff != "" {
+ t.Fatalf("Data does not match: %s", diff)
+ }
+ }
+ },
+ )
+ }
+}
diff --git a/stackit/internal/services/postgresflexalpha/user/planModifiers.yaml b/stackit/internal/services/postgresflexalpha/user/planModifiers.yaml
new file mode 100644
index 00000000..a7d4cde6
--- /dev/null
+++ b/stackit/internal/services/postgresflexalpha/user/planModifiers.yaml
@@ -0,0 +1,55 @@
+fields:
+ - name: 'id'
+ modifiers:
+ - 'UseStateForUnknown'
+
+ - name: 'user_id'
+ modifiers:
+ - 'UseStateForUnknown'
+
+ - name: 'instance_id'
+ validators:
+ - validate.NoSeparator
+ - validate.UUID
+ modifiers:
+ - 'UseStateForUnknown'
+
+ - name: 'project_id'
+ validators:
+ - validate.NoSeparator
+ - validate.UUID
+ modifiers:
+ - 'UseStateForUnknown'
+ - 'RequiresReplace'
+
+ - name: 'name'
+ modifiers:
+ - 'UseStateForUnknown'
+
+ - name: 'roles'
+ modifiers:
+ - 'UseStateForUnknown'
+
+ - name: 'password'
+ modifiers:
+ - 'UseStateForUnknown'
+
+ - name: 'host'
+ modifiers:
+ - 'UseStateForUnknown'
+
+ - name: 'port'
+ modifiers:
+ - 'UseStateForUnknown'
+
+ - name: 'region'
+ modifiers:
+ - 'RequiresReplace'
+
+ - name: 'status'
+ modifiers:
+ - 'UseStateForUnknown'
+
+ - name: 'connection_string'
+ modifiers:
+ - 'UseStateForUnknown'
diff --git a/stackit/internal/services/postgresflexalpha/user/resource.go b/stackit/internal/services/postgresflexalpha/user/resource.go
index 4df9577d..b7698986 100644
--- a/stackit/internal/services/postgresflexalpha/user/resource.go
+++ b/stackit/internal/services/postgresflexalpha/user/resource.go
@@ -2,6 +2,7 @@ package postgresflexalpha
import (
"context"
+ _ "embed"
"errors"
"fmt"
"math"
@@ -9,60 +10,53 @@ import (
"strconv"
"strings"
+ "github.com/hashicorp/terraform-plugin-framework/attr"
"github.com/hashicorp/terraform-plugin-framework/diag"
- "github.com/hashicorp/terraform-plugin-framework/resource/schema/int64planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/identityschema"
+ postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
+ postgresflexalpha "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/user/resources_gen"
postgresflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/utils"
- postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
-
- "github.com/hashicorp/terraform-plugin-framework-validators/setvalidator"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/path"
+ "github.com/hashicorp/terraform-plugin-framework/resource"
+ "github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-log/tflog"
+ "github.com/stackitcloud/stackit-sdk-go/core/oapierror"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/validate"
-
- "github.com/hashicorp/terraform-plugin-framework/attr"
- "github.com/hashicorp/terraform-plugin-framework/path"
- "github.com/hashicorp/terraform-plugin-framework/resource"
- "github.com/hashicorp/terraform-plugin-framework/resource/schema"
- "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
- "github.com/hashicorp/terraform-plugin-framework/types"
- "github.com/stackitcloud/stackit-sdk-go/core/oapierror"
)
-// Ensure the implementation satisfies the expected interfaces.
var (
+ // Ensure the implementation satisfies the expected interfaces.
_ resource.Resource = &userResource{}
_ resource.ResourceWithConfigure = &userResource{}
_ resource.ResourceWithImportState = &userResource{}
_ resource.ResourceWithModifyPlan = &userResource{}
-)
+ _ resource.ResourceWithIdentity = &userResource{}
-type Model struct {
- Id types.String `tfsdk:"id"` // needed by TF
- UserId types.Int64 `tfsdk:"user_id"`
- InstanceId types.String `tfsdk:"instance_id"`
- ProjectId types.String `tfsdk:"project_id"`
- Username types.String `tfsdk:"username"`
- Roles types.Set `tfsdk:"roles"`
- Password types.String `tfsdk:"password"`
- Host types.String `tfsdk:"host"`
- Port types.Int64 `tfsdk:"port"`
- Region types.String `tfsdk:"region"`
- Status types.String `tfsdk:"status"`
- ConnectionString types.String `tfsdk:"connection_string"`
-}
+ // Error message constants
+ extractErrorSummary = "extracting failed"
+ extractErrorMessage = "Extracting identity data: %v"
+)
// NewUserResource is a helper function to simplify the provider implementation.
func NewUserResource() resource.Resource {
return &userResource{}
}
-// userResource is the resource implementation.
+// resourceModel represents the Terraform resource state for a PostgreSQL Flex user.
+type resourceModel = postgresflexalpha.UserModel
+
+// UserResourceIdentityModel describes the resource's identity attributes.
+type UserResourceIdentityModel struct {
+ ProjectID types.String `tfsdk:"project_id"`
+ Region types.String `tfsdk:"region"`
+ InstanceID types.String `tfsdk:"instance_id"`
+ UserID types.Int64 `tfsdk:"user_id"`
+}
+
+// userResource implements the resource handling for a PostgreSQL Flex user.
type userResource struct {
client *postgresflex.APIClient
providerData core.ProviderData
@@ -75,7 +69,7 @@ func (r *userResource) ModifyPlan(
req resource.ModifyPlanRequest,
resp *resource.ModifyPlanResponse,
) { // nolint:gocritic // function signature required by Terraform
- var configModel Model
+ var configModel resourceModel
// skip initial empty configuration to avoid follow-up errors
if req.Config.Raw.IsNull() {
return
@@ -85,7 +79,7 @@ func (r *userResource) ModifyPlan(
return
}
- var planModel Model
+ var planModel resourceModel
resp.Diagnostics.Append(req.Plan.Get(ctx, &planModel)...)
if resp.Diagnostics.HasError() {
return
@@ -123,117 +117,25 @@ func (r *userResource) Configure(ctx context.Context, req resource.ConfigureRequ
tflog.Info(ctx, "Postgres Flex user client configured")
}
+//go:embed planModifiers.yaml
+var modifiersFileByte []byte
+
// Schema defines the schema for the resource.
-func (r *userResource) Schema(_ context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) {
- rolesOptions := []string{"login", "createdb", "createrole"}
+func (r *userResource) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) {
+ s := postgresflexalpha.UserResourceSchema(ctx)
- descriptions := map[string]string{
- "main": "Postgres Flex user resource schema. Must have a `region` specified in the provider configuration.",
- "id": "Terraform's internal resource ID. It is structured as \"`project_id`,`region`,`instance_id`,`user_id`\".",
- "user_id": "User ID.",
- "instance_id": "ID of the PostgresFlex instance.",
- "project_id": "STACKIT project ID to which the instance is associated.",
- "username": "The name of the user.",
- "roles": "Database access levels for the user. " + utils.FormatPossibleValues(rolesOptions...),
- "region": "The resource region. If not defined, the provider region is used.",
- "status": "The current status of the user.",
- "password": "The password for the user. This is only set upon creation.",
- "host": "The host of the Postgres Flex instance.",
- "port": "The port of the Postgres Flex instance.",
- "connection_string": "The connection string for the user to the instance.",
+ fields, err := utils.ReadModifiersConfig(modifiersFileByte)
+ if err != nil {
+ resp.Diagnostics.AddError("error during read modifiers config file", err.Error())
+ return
}
- resp.Schema = schema.Schema{
- Description: descriptions["main"],
- Attributes: map[string]schema.Attribute{
- "id": schema.StringAttribute{
- Description: descriptions["id"],
- Computed: true,
- PlanModifiers: []planmodifier.String{
- stringplanmodifier.UseStateForUnknown(),
- },
- },
- "user_id": schema.Int64Attribute{
- Description: descriptions["user_id"],
- Computed: true,
- PlanModifiers: []planmodifier.Int64{
- int64planmodifier.UseStateForUnknown(),
- },
- Validators: []validator.Int64{},
- },
- "instance_id": schema.StringAttribute{
- Description: descriptions["instance_id"],
- Required: true,
- PlanModifiers: []planmodifier.String{
- stringplanmodifier.RequiresReplace(),
- stringplanmodifier.UseStateForUnknown(),
- },
- Validators: []validator.String{
- validate.UUID(),
- validate.NoSeparator(),
- },
- },
- "project_id": schema.StringAttribute{
- Description: descriptions["project_id"],
- Required: true,
- PlanModifiers: []planmodifier.String{
- stringplanmodifier.RequiresReplace(),
- stringplanmodifier.UseStateForUnknown(),
- },
- Validators: []validator.String{
- validate.UUID(),
- validate.NoSeparator(),
- },
- },
- "username": schema.StringAttribute{
- Description: descriptions["username"],
- Required: true,
- PlanModifiers: []planmodifier.String{
- // stringplanmodifier.RequiresReplace(),
- },
- },
- "roles": schema.SetAttribute{
- Description: descriptions["roles"],
- ElementType: types.StringType,
- Required: true,
- Validators: []validator.Set{
- setvalidator.ValueStringsAre(
- stringvalidator.OneOf(rolesOptions...),
- ),
- },
- },
- "password": schema.StringAttribute{
- Description: descriptions["password"],
- Computed: true,
- Sensitive: true,
- },
- "host": schema.StringAttribute{
- Description: descriptions["host"],
- Computed: true,
- },
- "port": schema.Int64Attribute{
- Description: descriptions["port"],
- Computed: true,
- },
- "region": schema.StringAttribute{
- Optional: true,
- // must be computed to allow for storing the override value from the provider
- Computed: true,
- Description: descriptions["region"],
- PlanModifiers: []planmodifier.String{
- stringplanmodifier.RequiresReplace(),
- },
- },
- "status": schema.StringAttribute{
- Description: descriptions["status"],
- Computed: true,
- },
- "connection_string": schema.StringAttribute{
- Description: descriptions["connection_string"],
- Computed: true,
- },
- },
+ err = utils.AddPlanModifiersToResourceSchema(fields, &s)
+ if err != nil {
+ resp.Diagnostics.AddError("error adding plan modifiers", err.Error())
+ return
}
+ resp.Schema = s
}
// Create creates the resource and sets the initial Terraform state.
@@ -242,16 +144,33 @@ func (r *userResource) Create(
req resource.CreateRequest,
resp *resource.CreateResponse,
) { // nolint:gocritic // function signature required by Terraform
- var model Model
+ var model resourceModel
diags := req.Plan.Get(ctx, &model)
resp.Diagnostics.Append(diags...)
if resp.Diagnostics.HasError() {
return
}
+ // Read identity data
+ var identityData UserResourceIdentityModel
+ resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
ctx = core.InitProviderContext(ctx)
- ctx = r.setTFLogFields(ctx, &model)
- arg := r.getClientArg(&model)
+
+ arg, errExt := r.extractIdentityData(model, identityData)
+	if errExt != nil {
+		core.LogAndAddError(
+			ctx,
+			&resp.Diagnostics,
+			extractErrorSummary,
+			fmt.Sprintf(extractErrorMessage, errExt),
+		)
+		return
+	}
+
+ ctx = r.setTFLogFields(ctx, arg)
var roles = r.expandRoles(ctx, model.Roles, &resp.Diagnostics)
if resp.Diagnostics.HasError() {
@@ -277,8 +196,6 @@ func (r *userResource) Create(
return
}
- ctx = core.LogResponse(ctx)
-
if userResp.Id == nil || *userResp.Id == 0 {
core.LogAndAddError(
ctx,
@@ -288,12 +205,28 @@ func (r *userResource) Create(
)
return
}
+ model.Id = types.Int64PointerValue(userResp.Id)
model.UserId = types.Int64PointerValue(userResp.Id)
model.Password = types.StringPointerValue(userResp.Password)
ctx = tflog.SetField(ctx, "user_id", *userResp.Id)
- exists, err := r.getUserResource(ctx, &model)
+ ctx = core.LogResponse(ctx)
+
+ // Set data returned by API in identity
+ identity := UserResourceIdentityModel{
+ ProjectID: types.StringValue(arg.projectId),
+ Region: types.StringValue(arg.region),
+ InstanceID: types.StringValue(arg.instanceId),
+ UserID: types.Int64PointerValue(userResp.Id),
+ }
+ resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ // Verify creation
+ exists, err := r.getUserResource(ctx, &model, arg)
if err != nil {
core.LogAndAddError(ctx, &resp.Diagnostics, "Error creating user", fmt.Sprintf("Calling API: %v", err))
@@ -322,16 +255,38 @@ func (r *userResource) Read(
req resource.ReadRequest,
resp *resource.ReadResponse,
) { // nolint:gocritic // function signature required by Terraform
- var model Model
+ var model resourceModel
diags := req.State.Get(ctx, &model)
resp.Diagnostics.Append(diags...)
if resp.Diagnostics.HasError() {
return
}
+ // Read identity data
+ var identityData UserResourceIdentityModel
+ resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
ctx = core.InitProviderContext(ctx)
- exists, err := r.getUserResource(ctx, &model)
+ arg, errExt := r.extractIdentityData(model, identityData)
+	if errExt != nil {
+		core.LogAndAddError(
+			ctx,
+			&resp.Diagnostics,
+			extractErrorSummary,
+			fmt.Sprintf(extractErrorMessage, errExt),
+		)
+		return
+	}
+
+ ctx = r.setTFLogFields(ctx, arg)
+
+ ctx = core.InitProviderContext(ctx)
+
+ // Read resource state
+ exists, err := r.getUserResource(ctx, &model, arg)
if err != nil {
core.LogAndAddError(ctx, &resp.Diagnostics, "Error reading user", fmt.Sprintf("Calling API: %v", err))
@@ -360,19 +315,37 @@ func (r *userResource) Update(
req resource.UpdateRequest,
resp *resource.UpdateResponse,
) { // nolint:gocritic // function signature required by Terraform
- var model Model
+ var model resourceModel
diags := req.Plan.Get(ctx, &model)
resp.Diagnostics.Append(diags...)
if resp.Diagnostics.HasError() {
return
}
+ // Read identity data
+ var identityData UserResourceIdentityModel
+ resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ ctx = core.InitProviderContext(ctx)
+
+ arg, errExt := r.extractIdentityData(model, identityData)
+	if errExt != nil {
+		core.LogAndAddError(
+			ctx,
+			&resp.Diagnostics,
+			extractErrorSummary,
+			fmt.Sprintf(extractErrorMessage, errExt),
+		)
+		return
+	}
+
+ ctx = r.setTFLogFields(ctx, arg)
ctx = core.InitProviderContext(ctx)
- ctx = r.setTFLogFields(ctx, &model)
- arg := r.getClientArg(&model)
// Retrieve values from state
- var stateModel Model
+ var stateModel resourceModel
diags = req.State.Get(ctx, &stateModel)
resp.Diagnostics.Append(diags...)
if resp.Diagnostics.HasError() {
@@ -413,7 +386,8 @@ func (r *userResource) Update(
ctx = core.LogResponse(ctx)
- exists, err := r.getUserResource(ctx, &stateModel)
+ // Verify update
+ exists, err := r.getUserResource(ctx, &stateModel, arg)
if err != nil {
core.LogAndAddError(ctx, &resp.Diagnostics, "Error updating user", fmt.Sprintf("Calling API: %v", err))
@@ -443,16 +417,33 @@ func (r *userResource) Delete(
req resource.DeleteRequest,
resp *resource.DeleteResponse,
) { // nolint:gocritic // function signature required by Terraform
- var model Model
+ var model resourceModel
diags := req.State.Get(ctx, &model)
resp.Diagnostics.Append(diags...)
if resp.Diagnostics.HasError() {
return
}
+ // Read identity data
+ var identityData UserResourceIdentityModel
+ resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
ctx = core.InitProviderContext(ctx)
- ctx = r.setTFLogFields(ctx, &model)
- arg := r.getClientArg(&model)
+
+ arg, errExt := r.extractIdentityData(model, identityData)
+	if errExt != nil {
+		core.LogAndAddError(
+			ctx,
+			&resp.Diagnostics,
+			extractErrorSummary,
+			fmt.Sprintf(extractErrorMessage, errExt),
+		)
+		return
+	}
+
+ ctx = r.setTFLogFields(ctx, arg)
+ ctx = core.InitProviderContext(ctx)
userId64 := arg.userId
if userId64 > math.MaxInt32 {
@@ -469,7 +460,8 @@ func (r *userResource) Delete(
ctx = core.LogResponse(ctx)
- exists, err := r.getUserResource(ctx, &model)
+ // Verify deletion
+ exists, err := r.getUserResource(ctx, &model, arg)
if err != nil {
core.LogAndAddError(ctx, &resp.Diagnostics, "Error deleting user", fmt.Sprintf("Calling API: %v", err))
return
@@ -487,40 +479,31 @@ func (r *userResource) Delete(
tflog.Info(ctx, "Postgres Flex user deleted")
}
-// ImportState imports a resource into the Terraform state on success.
-// The expected format of the resource import identifier is: project_id,zone_id,record_set_id
-func (r *userResource) ImportState(
- ctx context.Context,
- req resource.ImportStateRequest,
- resp *resource.ImportStateResponse,
+// IdentitySchema defines the fields that are required to uniquely identify a resource.
+func (r *userResource) IdentitySchema(
+ _ context.Context,
+ _ resource.IdentitySchemaRequest,
+ response *resource.IdentitySchemaResponse,
) {
- idParts := strings.Split(req.ID, core.Separator)
- if len(idParts) != 4 || idParts[0] == "" || idParts[1] == "" || idParts[2] == "" || idParts[3] == "" {
- core.LogAndAddError(
- ctx, &resp.Diagnostics,
- "Error importing user",
- fmt.Sprintf(
- "Expected import identifier with format [project_id],[region],[instance_id],[user_id], got %q",
- req.ID,
- ),
- )
- return
+ response.IdentitySchema = identityschema.Schema{
+ Attributes: map[string]identityschema.Attribute{
+ "project_id": identityschema.StringAttribute{
+ RequiredForImport: true,
+ },
+ "region": identityschema.StringAttribute{
+ RequiredForImport: true,
+ },
+ "instance_id": identityschema.StringAttribute{
+ RequiredForImport: true,
+ },
+ "user_id": identityschema.Int64Attribute{
+ RequiredForImport: true,
+ },
+ },
}
-
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), idParts[0])...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), idParts[1])...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), idParts[2])...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("user_id"), idParts[3])...)
- core.LogAndAddWarning(
- ctx,
- &resp.Diagnostics,
- "postgresflexalpha user imported with empty password and empty uri",
- "The user password and uri are not imported as they are only available upon creation of a new user. The password and uri fields will be empty.",
- )
- tflog.Info(ctx, "postgresflexalpha user state imported")
}
-func mapFields(userResp *postgresflex.GetUserResponse, model *Model, region string) error {
+func mapResourceFields(userResp *postgresflex.GetUserResponse, model *resourceModel, region string) error {
if userResp == nil {
return fmt.Errorf("response is nil")
}
@@ -537,14 +520,12 @@ func mapFields(userResp *postgresflex.GetUserResponse, model *Model, region stri
} else {
return fmt.Errorf("user id not present")
}
- model.Id = utils.BuildInternalTerraformId(
- model.ProjectId.ValueString(), region, model.InstanceId.ValueString(), strconv.FormatInt(userId, 10),
- )
+
model.UserId = types.Int64Value(userId)
- model.Username = types.StringPointerValue(user.Name)
+ model.Name = types.StringPointerValue(user.Name)
if user.Roles == nil {
- model.Roles = types.SetNull(types.StringType)
+		model.Roles = types.ListNull(types.StringType)
} else {
var roles []attr.Value
for _, role := range *user.Roles {
@@ -554,27 +535,21 @@ func mapFields(userResp *postgresflex.GetUserResponse, model *Model, region stri
if diags.HasError() {
return fmt.Errorf("failed to map roles: %w", core.DiagsToError(diags))
}
- model.Roles = rolesSet
+		model.Roles = types.ListValueMust(types.StringType, rolesSet.Elements())
}
- model.Host = types.StringPointerValue(user.Host)
- model.Port = types.Int64PointerValue(user.Port)
model.Region = types.StringValue(region)
model.Status = types.StringPointerValue(user.Status)
- model.ConnectionString = types.StringPointerValue(user.ConnectionString)
return nil
}
// getUserResource refreshes the resource state by calling the API and mapping the response to the model.
// Returns true if the resource state was successfully refreshed, false if the resource does not exist.
-func (r *userResource) getUserResource(ctx context.Context, model *Model) (bool, error) {
- ctx = r.setTFLogFields(ctx, model)
- arg := r.getClientArg(model)
+func (r *userResource) getUserResource(ctx context.Context, model *resourceModel, arg *clientArg) (bool, error) {
- userId64 := arg.userId
- if userId64 > math.MaxInt32 {
+ if arg.userId > math.MaxInt32 {
return false, errors.New("error in type conversion: int value too large (userId)")
}
- userId := int32(userId64)
+ userId := int32(arg.userId)
// API Call
userResp, err := r.client.GetUserRequest(ctx, arg.projectId, arg.region, arg.instanceId, userId).Execute()
@@ -588,13 +563,14 @@ func (r *userResource) getUserResource(ctx context.Context, model *Model) (bool,
return false, fmt.Errorf("error fetching user resource: %w", err)
}
- if err := mapFields(userResp, model, arg.region); err != nil {
+ if err := mapResourceFields(userResp, model, arg.region); err != nil {
return false, fmt.Errorf("error mapping user resource: %w", err)
}
return true, nil
}
+// clientArg holds the arguments for API calls.
type clientArg struct {
projectId string
instanceId string
@@ -602,29 +578,137 @@ type clientArg struct {
userId int64
}
-// getClientArg constructs client arguments from the model.
-func (r *userResource) getClientArg(model *Model) *clientArg {
- return &clientArg{
- projectId: model.ProjectId.ValueString(),
- instanceId: model.InstanceId.ValueString(),
- region: r.providerData.GetRegionWithOverride(model.Region),
- userId: model.UserId.ValueInt64(),
+// ImportState imports a resource into the Terraform state on success.
+// The expected import identifier format is: [project_id],[region],[instance_id],[user_id]
+func (r *userResource) ImportState(
+ ctx context.Context,
+ req resource.ImportStateRequest,
+ resp *resource.ImportStateResponse,
+) {
+
+ ctx = core.InitProviderContext(ctx)
+
+ if req.ID != "" {
+
+ idParts := strings.Split(req.ID, core.Separator)
+
+ if len(idParts) != 4 || idParts[0] == "" || idParts[1] == "" || idParts[2] == "" || idParts[3] == "" {
+ core.LogAndAddError(
+ ctx, &resp.Diagnostics,
+ "Error importing user",
+ fmt.Sprintf(
+ "Expected import identifier with format [project_id],[region],[instance_id],[user_id], got %q",
+ req.ID,
+ ),
+ )
+ return
+ }
+
+ userId, err := strconv.ParseInt(idParts[3], 10, 64)
+ if err != nil {
+ core.LogAndAddError(
+ ctx,
+ &resp.Diagnostics,
+ "Error importing user",
+ fmt.Sprintf("Invalid user_id format: %q. It must be a valid integer.", idParts[3]),
+ )
+ return
+ }
+
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), idParts[0])...)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), idParts[1])...)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), idParts[2])...)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("user_id"), userId)...)
+
+ tflog.Info(ctx, "Postgres Flex user state imported")
+
+ return
}
+
+ // If no ID is provided, attempt to read identity attributes from the import configuration
+ var identityData UserResourceIdentityModel
+ resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ projectId := identityData.ProjectID.ValueString()
+ region := identityData.Region.ValueString()
+ instanceId := identityData.InstanceID.ValueString()
+ userId := identityData.UserID.ValueInt64()
+
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), projectId)...)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), region)...)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), instanceId)...)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("user_id"), userId)...)
+
+ tflog.Info(ctx, "Postgres Flex user state imported")
+}
+
+// extractIdentityData extracts essential identifiers from the resource model, falling back to the identity model.
+func (r *userResource) extractIdentityData(
+ model resourceModel,
+ identity UserResourceIdentityModel,
+) (*clientArg, error) {
+
+ var projectId, region, instanceId string
+ var userId int64
+
+ if !model.UserId.IsNull() && !model.UserId.IsUnknown() {
+ userId = model.UserId.ValueInt64()
+ } else {
+ if identity.UserID.IsNull() || identity.UserID.IsUnknown() {
+ return nil, fmt.Errorf("user_id not found in config")
+ }
+ userId = identity.UserID.ValueInt64()
+ }
+
+ if !model.ProjectId.IsNull() && !model.ProjectId.IsUnknown() {
+ projectId = model.ProjectId.ValueString()
+ } else {
+ if identity.ProjectID.IsNull() || identity.ProjectID.IsUnknown() {
+ return nil, fmt.Errorf("project_id not found in config")
+ }
+ projectId = identity.ProjectID.ValueString()
+ }
+
+ if !model.Region.IsNull() && !model.Region.IsUnknown() {
+ region = r.providerData.GetRegionWithOverride(model.Region)
+ } else {
+ if identity.Region.IsNull() || identity.Region.IsUnknown() {
+ return nil, fmt.Errorf("region not found in config")
+ }
+ region = r.providerData.GetRegionWithOverride(identity.Region)
+ }
+
+ if !model.InstanceId.IsNull() && !model.InstanceId.IsUnknown() {
+ instanceId = model.InstanceId.ValueString()
+ } else {
+ if identity.InstanceID.IsNull() || identity.InstanceID.IsUnknown() {
+ return nil, fmt.Errorf("instance_id not found in config")
+ }
+ instanceId = identity.InstanceID.ValueString()
+ }
+ return &clientArg{
+ projectId: projectId,
+ instanceId: instanceId,
+ region: region,
+ userId: userId,
+ }, nil
}
// setTFLogFields adds relevant fields to the context for terraform logging purposes.
-func (r *userResource) setTFLogFields(ctx context.Context, model *Model) context.Context {
- usrCtx := r.getClientArg(model)
-
- ctx = tflog.SetField(ctx, "project_id", usrCtx.projectId)
- ctx = tflog.SetField(ctx, "instance_id", usrCtx.instanceId)
- ctx = tflog.SetField(ctx, "user_id", usrCtx.userId)
- ctx = tflog.SetField(ctx, "region", usrCtx.region)
+func (r *userResource) setTFLogFields(ctx context.Context, arg *clientArg) context.Context {
+ ctx = tflog.SetField(ctx, "project_id", arg.projectId)
+ ctx = tflog.SetField(ctx, "instance_id", arg.instanceId)
+ ctx = tflog.SetField(ctx, "region", arg.region)
+ ctx = tflog.SetField(ctx, "user_id", arg.userId)
return ctx
}
-func (r *userResource) expandRoles(ctx context.Context, rolesSet types.Set, diags *diag.Diagnostics) []string {
+// expandRoles converts a Terraform list of roles to a string slice.
+func (r *userResource) expandRoles(ctx context.Context, rolesSet types.List, diags *diag.Diagnostics) []string {
if rolesSet.IsNull() || rolesSet.IsUnknown() {
return nil
}
@@ -632,42 +716,3 @@ func (r *userResource) expandRoles(ctx context.Context, rolesSet types.Set, diag
diags.Append(rolesSet.ElementsAs(ctx, &roles, false)...)
return roles
}
-
-func toCreatePayload(model *Model, roles *[]string) (*postgresflex.CreateUserRequestPayload, error) {
- if model == nil {
- return nil, fmt.Errorf("nil model")
- }
- if roles == nil {
- return nil, fmt.Errorf("nil roles")
- }
-
- return &postgresflex.CreateUserRequestPayload{
- Roles: toPayloadRoles(roles),
- Name: conversion.StringValueToPointer(model.Username),
- }, nil
-}
-
-func toPayloadRoles(roles *[]string) *[]postgresflex.UserRole {
- var userRoles = make([]postgresflex.UserRole, 0, len(*roles))
- for _, role := range *roles {
- userRoles = append(userRoles, postgresflex.UserRole(role))
- }
- return &userRoles
-}
-
-func toUpdatePayload(model *Model, roles *[]string) (
- *postgresflex.UpdateUserRequestPayload,
- error,
-) {
- if model == nil {
- return nil, fmt.Errorf("nil model")
- }
- if roles == nil {
- return nil, fmt.Errorf("nil roles")
- }
-
- return &postgresflex.UpdateUserRequestPayload{
- Name: conversion.StringValueToPointer(model.Username),
- Roles: toPayloadRoles(roles),
- }, nil
-}
diff --git a/stackit/internal/services/postgresflexalpha/user/resource_test.go b/stackit/internal/services/postgresflexalpha/user/resource_test.go
deleted file mode 100644
index e4a13482..00000000
--- a/stackit/internal/services/postgresflexalpha/user/resource_test.go
+++ /dev/null
@@ -1,448 +0,0 @@
-package postgresflexalpha
-
-import (
- "testing"
-
- "github.com/google/go-cmp/cmp"
- "github.com/hashicorp/terraform-plugin-framework/attr"
- "github.com/hashicorp/terraform-plugin-framework/types"
- "github.com/stackitcloud/stackit-sdk-go/core/utils"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
-)
-
-func TestMapFieldsCreate(t *testing.T) {
- const testRegion = "region"
- tests := []struct {
- description string
- input *postgresflexalpha.GetUserResponse
- region string
- expected Model
- isValid bool
- }{
- {
- "default_values",
- &postgresflexalpha.GetUserResponse{
- Id: utils.Ptr(int64(1)),
- },
- testRegion,
- Model{
- Id: types.StringValue("pid,region,iid,1"),
- UserId: types.Int64Value(1),
- InstanceId: types.StringValue("iid"),
- ProjectId: types.StringValue("pid"),
- Username: types.StringNull(),
- Roles: types.SetNull(types.StringType),
- Password: types.StringNull(),
- Host: types.StringNull(),
- Port: types.Int64Null(),
- Region: types.StringValue(testRegion),
- Status: types.StringNull(),
- ConnectionString: types.StringNull(),
- },
- true,
- },
- {
- "simple_values",
- &postgresflexalpha.GetUserResponse{
- Id: utils.Ptr(int64(1)),
- Name: utils.Ptr("username"),
- ConnectionString: utils.Ptr("connection_string"),
- Status: utils.Ptr("status"),
- },
- testRegion,
- Model{
- Id: types.StringValue("pid,region,iid,1"),
- UserId: types.Int64Value(1),
- InstanceId: types.StringValue("iid"),
- ProjectId: types.StringValue("pid"),
- Username: types.StringValue("username"),
- Roles: types.SetNull(types.StringType),
- Password: types.StringNull(),
- Host: types.StringNull(),
- Port: types.Int64Null(),
- Region: types.StringValue(testRegion),
- Status: types.StringValue("status"),
- ConnectionString: types.StringValue("connection_string"),
- },
- true,
- },
- {
- "null_fields_and_int_conversions",
- &postgresflexalpha.GetUserResponse{
- Id: utils.Ptr(int64(1)),
- Name: nil,
- ConnectionString: nil,
- Status: nil,
- },
- testRegion,
- Model{
- Id: types.StringValue("pid,region,iid,1"),
- UserId: types.Int64Value(1),
- InstanceId: types.StringValue("iid"),
- ProjectId: types.StringValue("pid"),
- Username: types.StringNull(),
- Roles: types.SetNull(types.StringType),
- Password: types.StringNull(),
- Host: types.StringNull(),
- Port: types.Int64Null(),
- Region: types.StringValue(testRegion),
- Status: types.StringNull(),
- ConnectionString: types.StringNull(),
- },
- true,
- },
- {
- "nil_response",
- nil,
- testRegion,
- Model{},
- false,
- },
- {
- "nil_response_2",
- &postgresflexalpha.GetUserResponse{},
- testRegion,
- Model{},
- false,
- },
- {
- "no_resource_id",
- &postgresflexalpha.GetUserResponse{},
- testRegion,
- Model{},
- false,
- },
- }
- for _, tt := range tests {
- t.Run(
- tt.description, func(t *testing.T) {
- state := &Model{
- ProjectId: tt.expected.ProjectId,
- InstanceId: tt.expected.InstanceId,
- }
-
- err := mapFields(tt.input, state, tt.region)
- if !tt.isValid && err == nil {
- t.Fatalf("Should have failed")
- }
- if tt.isValid && err != nil {
- t.Fatalf("Should not have failed: %v", err)
- }
- if tt.isValid {
- diff := cmp.Diff(state, &tt.expected)
- if diff != "" {
- t.Fatalf("Data does not match: %s", diff)
- }
- }
- },
- )
- }
-}
-
-func TestMapFields(t *testing.T) {
- const testRegion = "region"
- tests := []struct {
- description string
- input *postgresflexalpha.GetUserResponse
- region string
- expected Model
- isValid bool
- }{
- {
- "default_values",
- &postgresflexalpha.GetUserResponse{},
- testRegion,
- Model{
- Id: types.StringValue("pid,region,iid,1"),
- UserId: types.Int64Value(int64(1)),
- InstanceId: types.StringValue("iid"),
- ProjectId: types.StringValue("pid"),
- Username: types.StringNull(),
- Roles: types.SetNull(types.StringType),
- Host: types.StringNull(),
- Port: types.Int64Null(),
- Region: types.StringValue(testRegion),
- Status: types.StringNull(),
- ConnectionString: types.StringNull(),
- },
- true,
- },
- {
- "simple_values",
- &postgresflexalpha.GetUserResponse{
- Roles: &[]postgresflexalpha.UserRole{
- "role_1",
- "role_2",
- "",
- },
- Name: utils.Ptr("username"),
- Host: utils.Ptr("host"),
- Port: utils.Ptr(int64(1234)),
- },
- testRegion,
- Model{
- Id: types.StringValue("pid,region,iid,1"),
- UserId: types.Int64Value(1),
- InstanceId: types.StringValue("iid"),
- ProjectId: types.StringValue("pid"),
- Username: types.StringValue("username"),
- Roles: types.SetValueMust(
- types.StringType, []attr.Value{
- types.StringValue("role_1"),
- types.StringValue("role_2"),
- types.StringValue(""),
- },
- ),
- Host: types.StringValue("host"),
- Port: types.Int64Value(1234),
- Region: types.StringValue(testRegion),
- Status: types.StringNull(),
- ConnectionString: types.StringNull(),
- },
- true,
- },
- {
- "null_fields_and_int_conversions",
- &postgresflexalpha.GetUserResponse{
- Id: utils.Ptr(int64(1)),
- Name: nil,
- Host: nil,
- Port: utils.Ptr(int64(2123456789)),
- },
- testRegion,
- Model{
- Id: types.StringValue("pid,region,iid,1"),
- UserId: types.Int64Value(1),
- InstanceId: types.StringValue("iid"),
- ProjectId: types.StringValue("pid"),
- Username: types.StringNull(),
- Roles: types.SetNull(types.StringType),
- Host: types.StringNull(),
- Port: types.Int64Value(2123456789),
- Region: types.StringValue(testRegion),
- Status: types.StringNull(),
- ConnectionString: types.StringNull(),
- },
- true,
- },
- {
- "nil_response",
- nil,
- testRegion,
- Model{},
- false,
- },
- {
- "nil_response_2",
- &postgresflexalpha.GetUserResponse{},
- testRegion,
- Model{},
- false,
- },
- {
- "no_resource_id",
- &postgresflexalpha.GetUserResponse{},
- testRegion,
- Model{},
- false,
- },
- }
- for _, tt := range tests {
- t.Run(
- tt.description, func(t *testing.T) {
- state := &Model{
- ProjectId: tt.expected.ProjectId,
- InstanceId: tt.expected.InstanceId,
- UserId: tt.expected.UserId,
- }
- err := mapFields(tt.input, state, tt.region)
- if !tt.isValid && err == nil {
- t.Fatalf("Should have failed")
- }
- if tt.isValid && err != nil {
- t.Fatalf("Should not have failed: %v", err)
- }
- if tt.isValid {
- diff := cmp.Diff(state, &tt.expected)
- if diff != "" {
- t.Fatalf("Data does not match: %s", diff)
- }
- }
- },
- )
- }
-}
-
-func TestToCreatePayload(t *testing.T) {
- tests := []struct {
- description string
- input *Model
- inputRoles *[]string
- expected *postgresflexalpha.CreateUserRequestPayload
- isValid bool
- }{
- {
- "default_values",
- &Model{},
- &[]string{},
- &postgresflexalpha.CreateUserRequestPayload{
- Name: nil,
- Roles: &[]postgresflexalpha.UserRole{},
- },
- true,
- },
- {
- "simple_values",
- &Model{
- Username: types.StringValue("username"),
- },
- &[]string{
- "role_1",
- "role_2",
- },
- &postgresflexalpha.CreateUserRequestPayload{
- Name: utils.Ptr("username"),
- Roles: &[]postgresflexalpha.UserRole{
- "role_1",
- "role_2",
- },
- },
- true,
- },
- {
- "null_fields_and_int_conversions",
- &Model{
- Username: types.StringNull(),
- },
- &[]string{
- "",
- },
- &postgresflexalpha.CreateUserRequestPayload{
- Roles: &[]postgresflexalpha.UserRole{
- "",
- },
- Name: nil,
- },
- true,
- },
- {
- "nil_model",
- nil,
- &[]string{},
- nil,
- false,
- },
- {
- "nil_roles",
- &Model{},
- nil,
- nil,
- false,
- },
- }
- for _, tt := range tests {
- t.Run(
- tt.description, func(t *testing.T) {
- output, err := toCreatePayload(tt.input, tt.inputRoles)
- if !tt.isValid && err == nil {
- t.Fatalf("Should have failed")
- }
- if tt.isValid && err != nil {
- t.Fatalf("Should not have failed: %v", err)
- }
- if tt.isValid {
- diff := cmp.Diff(output, tt.expected)
- if diff != "" {
- t.Fatalf("Data does not match: %s", diff)
- }
- }
- },
- )
- }
-}
-
-func TestToUpdatePayload(t *testing.T) {
- tests := []struct {
- description string
- input *Model
- inputRoles *[]string
- expected *postgresflexalpha.UpdateUserRequestPayload
- isValid bool
- }{
- {
- "default_values",
- &Model{},
- &[]string{},
- &postgresflexalpha.UpdateUserRequestPayload{
- Roles: &[]postgresflexalpha.UserRole{},
- },
- true,
- },
- {
- "default_values",
- &Model{
- Username: types.StringValue("username"),
- },
- &[]string{
- "role_1",
- "role_2",
- },
- &postgresflexalpha.UpdateUserRequestPayload{
- Name: utils.Ptr("username"),
- Roles: &[]postgresflexalpha.UserRole{
- "role_1",
- "role_2",
- },
- },
- true,
- },
- {
- "null_fields_and_int_conversions",
- &Model{
- Username: types.StringNull(),
- },
- &[]string{
- "",
- },
- &postgresflexalpha.UpdateUserRequestPayload{
- Roles: &[]postgresflexalpha.UserRole{
- "",
- },
- },
- true,
- },
- {
- "nil_model",
- nil,
- &[]string{},
- nil,
- false,
- },
- {
- "nil_roles",
- &Model{},
- nil,
- nil,
- false,
- },
- }
- for _, tt := range tests {
- t.Run(
- tt.description, func(t *testing.T) {
- output, err := toUpdatePayload(tt.input, tt.inputRoles)
- if !tt.isValid && err == nil {
- t.Fatalf("Should have failed")
- }
- if tt.isValid && err != nil {
- t.Fatalf("Should not have failed: %v", err)
- }
- if tt.isValid {
- diff := cmp.Diff(output, tt.expected)
- if diff != "" {
- t.Fatalf("Data does not match: %s", diff)
- }
- }
- },
- )
- }
-}
diff --git a/stackit/internal/services/postgresflexalpha/user/resources_gen/user_resource_gen.go b/stackit/internal/services/postgresflexalpha/user/resources_gen/user_resource_gen.go
index 9734c2a9..f07ab701 100644
--- a/stackit/internal/services/postgresflexalpha/user/resources_gen/user_resource_gen.go
+++ b/stackit/internal/services/postgresflexalpha/user/resources_gen/user_resource_gen.go
@@ -19,11 +19,6 @@ func UserResourceSchema(ctx context.Context) schema.Schema {
Description: "The connection string for the user to the instance.",
MarkdownDescription: "The connection string for the user to the instance.",
},
- "host": schema.StringAttribute{
- Computed: true,
- Description: "The host of the instance in which the user belongs to.",
- MarkdownDescription: "The host of the instance in which the user belongs to.",
- },
"id": schema.Int64Attribute{
Computed: true,
Description: "The ID of the user.",
@@ -45,11 +40,6 @@ func UserResourceSchema(ctx context.Context) schema.Schema {
Description: "The password for the user.",
MarkdownDescription: "The password for the user.",
},
- "port": schema.Int64Attribute{
- Computed: true,
- Description: "The port of the instance in which the user belongs to.",
- MarkdownDescription: "The port of the instance in which the user belongs to.",
- },
"project_id": schema.StringAttribute{
Optional: true,
Computed: true,
@@ -91,12 +81,10 @@ func UserResourceSchema(ctx context.Context) schema.Schema {
type UserModel struct {
ConnectionString types.String `tfsdk:"connection_string"`
- Host types.String `tfsdk:"host"`
Id types.Int64 `tfsdk:"id"`
InstanceId types.String `tfsdk:"instance_id"`
Name types.String `tfsdk:"name"`
Password types.String `tfsdk:"password"`
- Port types.Int64 `tfsdk:"port"`
ProjectId types.String `tfsdk:"project_id"`
Region types.String `tfsdk:"region"`
Roles types.List `tfsdk:"roles"`
diff --git a/stackit/internal/services/sqlserverflexalpha/database/datasource.go b/stackit/internal/services/sqlserverflexalpha/database/datasource.go
index cd796159..3c201b5a 100644
--- a/stackit/internal/services/sqlserverflexalpha/database/datasource.go
+++ b/stackit/internal/services/sqlserverflexalpha/database/datasource.go
@@ -4,6 +4,8 @@ import (
"context"
"github.com/hashicorp/terraform-plugin-framework/datasource"
+ "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
+ "github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-log/tflog"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
@@ -12,6 +14,12 @@ import (
sqlserverflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/utils"
)
+// dataSourceModel maps the data source schema data.
+type dataSourceModel struct {
+ sqlserverflexalphaGen.DatabaseModel
+ TerraformID types.String `tfsdk:"id"`
+}
+
var _ datasource.DataSource = (*databaseDataSource)(nil)
func NewDatabaseDataSource() datasource.DataSource {
@@ -23,16 +31,31 @@ type databaseDataSource struct {
providerData core.ProviderData
}
-func (d *databaseDataSource) Metadata(_ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
+func (d *databaseDataSource) Metadata(
+ _ context.Context,
+ req datasource.MetadataRequest,
+ resp *datasource.MetadataResponse,
+) {
resp.TypeName = req.ProviderTypeName + "_sqlserverflexalpha_database"
}
func (d *databaseDataSource) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) {
- resp.Schema = sqlserverflexalphaGen.DatabaseDataSourceSchema(ctx)
+ s := sqlserverflexalphaGen.DatabaseDataSourceSchema(ctx)
+ s.Attributes["id"] = schema.StringAttribute{
+ Description: "Terraform's internal resource ID. It is structured as \\\"`project_id`,`region`,`instance_id`," +
+ "`database_id`\\\".",
+ Computed: true,
+ }
+
+ resp.Schema = s
}
// Configure adds the provider configured client to the data source.
-func (d *databaseDataSource) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) {
+func (d *databaseDataSource) Configure(
+ ctx context.Context,
+ req datasource.ConfigureRequest,
+ resp *datasource.ConfigureResponse,
+) {
var ok bool
d.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics)
if !ok {
@@ -48,7 +71,7 @@ func (d *databaseDataSource) Configure(ctx context.Context, req datasource.Confi
}
func (d *databaseDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
- var data sqlserverflexalphaGen.DatabaseModel
+ var data dataSourceModel
// Read Terraform configuration data into the model
resp.Diagnostics.Append(req.Config.Get(ctx, &data)...)
diff --git a/stackit/internal/services/sqlserverflexalpha/database/datasources_gen/database_data_source_gen.go b/stackit/internal/services/sqlserverflexalpha/database/datasources_gen/database_data_source_gen.go
index 25406f5f..82250802 100644
--- a/stackit/internal/services/sqlserverflexalpha/database/datasources_gen/database_data_source_gen.go
+++ b/stackit/internal/services/sqlserverflexalpha/database/datasources_gen/database_data_source_gen.go
@@ -29,7 +29,7 @@ func DatabaseDataSourceSchema(ctx context.Context) schema.Schema {
Description: "The name of the database.",
MarkdownDescription: "The name of the database.",
},
- "id": schema.Int64Attribute{
+ "tf_original_api_id": schema.Int64Attribute{
Computed: true,
Description: "The id of the database.",
MarkdownDescription: "The id of the database.",
@@ -72,7 +72,7 @@ type DatabaseModel struct {
CollationName types.String `tfsdk:"collation_name"`
CompatibilityLevel types.Int64 `tfsdk:"compatibility_level"`
DatabaseName types.String `tfsdk:"database_name"`
- Id types.Int64 `tfsdk:"id"`
+ Id types.Int64 `tfsdk:"tf_original_api_id"`
InstanceId types.String `tfsdk:"instance_id"`
Name types.String `tfsdk:"name"`
Owner types.String `tfsdk:"owner"`
diff --git a/stackit/internal/services/sqlserverflexalpha/database/planModifiers.yaml b/stackit/internal/services/sqlserverflexalpha/database/planModifiers.yaml
new file mode 100644
index 00000000..d6209230
--- /dev/null
+++ b/stackit/internal/services/sqlserverflexalpha/database/planModifiers.yaml
@@ -0,0 +1,50 @@
+fields:
+ - name: 'id'
+ modifiers:
+ - 'UseStateForUnknown'
+
+ - name: 'instance_id'
+ validators:
+ - validate.NoSeparator
+ - validate.UUID
+ modifiers:
+ - 'RequiresReplace'
+
+ - name: 'project_id'
+ validators:
+ - validate.NoSeparator
+ - validate.UUID
+ modifiers:
+ - 'RequiresReplace'
+
+ - name: 'region'
+ modifiers:
+ - 'RequiresReplace'
+
+ - name: 'name'
+ modifiers:
+ - 'RequiresReplace'
+
+ - name: 'collation'
+ modifiers:
+ - 'RequiresReplace'
+
+ - name: 'owner'
+ modifiers:
+ - 'RequiresReplace'
+
+ - name: 'database_name'
+ modifiers:
+ - 'UseStateForUnknown'
+
+ - name: 'collation_name'
+ modifiers:
+ - 'UseStateForUnknown'
+
+ - name: 'compatibility'
+ modifiers:
+ - 'RequiresReplace'
+
+ - name: 'compatibility_level'
+ modifiers:
+ - 'UseStateForUnknown'
diff --git a/stackit/internal/services/sqlserverflexalpha/database/resource.go b/stackit/internal/services/sqlserverflexalpha/database/resource.go
index 52866a9c..f3dc6816 100644
--- a/stackit/internal/services/sqlserverflexalpha/database/resource.go
+++ b/stackit/internal/services/sqlserverflexalpha/database/resource.go
@@ -2,11 +2,15 @@ package sqlserverflexalpha
import (
"context"
+ _ "embed"
"fmt"
+ "strconv"
"strings"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
+ "github.com/hashicorp/terraform-plugin-framework/resource/identityschema"
+ "github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-log/tflog"
"github.com/stackitcloud/stackit-sdk-go/core/config"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha"
@@ -23,12 +27,24 @@ var (
_ resource.ResourceWithConfigure = &databaseResource{}
_ resource.ResourceWithImportState = &databaseResource{}
_ resource.ResourceWithModifyPlan = &databaseResource{}
+ _ resource.ResourceWithIdentity = &databaseResource{}
)
func NewDatabaseResource() resource.Resource {
return &databaseResource{}
}
+// resourceModel describes the resource data model.
+type resourceModel = sqlserverflexalphaGen.DatabaseModel
+
+// DatabaseResourceIdentityModel describes the resource's identity attributes.
+type DatabaseResourceIdentityModel struct {
+ ProjectID types.String `tfsdk:"project_id"`
+ Region types.String `tfsdk:"region"`
+ InstanceID types.String `tfsdk:"instance_id"`
+ DatabaseName types.String `tfsdk:"database_name"`
+}
+
type databaseResource struct {
client *sqlserverflexalpha.APIClient
providerData core.ProviderData
@@ -38,8 +54,47 @@ func (r *databaseResource) Metadata(_ context.Context, req resource.MetadataRequ
resp.TypeName = req.ProviderTypeName + "_sqlserverflexalpha_database"
}
+//go:embed planModifiers.yaml
+var modifiersFileByte []byte
+
func (r *databaseResource) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) {
- resp.Schema = sqlserverflexalphaGen.DatabaseResourceSchema(ctx)
+ s := sqlserverflexalphaGen.DatabaseResourceSchema(ctx)
+
+ fields, err := utils.ReadModifiersConfig(modifiersFileByte)
+ if err != nil {
+ resp.Diagnostics.AddError("error during read modifiers config file", err.Error())
+ return
+ }
+
+ err = utils.AddPlanModifiersToResourceSchema(fields, &s)
+ if err != nil {
+ resp.Diagnostics.AddError("error adding plan modifiers", err.Error())
+ return
+ }
+ resp.Schema = s
+}
+
+func (r *databaseResource) IdentitySchema(
+ _ context.Context,
+ _ resource.IdentitySchemaRequest,
+ resp *resource.IdentitySchemaResponse,
+) {
+ resp.IdentitySchema = identityschema.Schema{
+ Attributes: map[string]identityschema.Attribute{
+ "project_id": identityschema.StringAttribute{
+ RequiredForImport: true, // must be set during import by the practitioner
+ },
+ "region": identityschema.StringAttribute{
+ RequiredForImport: true, // can be defaulted by the provider configuration
+ },
+ "instance_id": identityschema.StringAttribute{
+ RequiredForImport: true, // can be defaulted by the provider configuration
+ },
+ "database_name": identityschema.StringAttribute{
+ RequiredForImport: true, // can be defaulted by the provider configuration
+ },
+ },
+ }
}
// Configure adds the provider configured client to the resource.
@@ -59,7 +114,10 @@ func (r *databaseResource) Configure(
utils.UserAgentConfigOption(r.providerData.Version),
}
if r.providerData.PostgresFlexCustomEndpoint != "" {
- apiClientConfigOptions = append(apiClientConfigOptions, config.WithEndpoint(r.providerData.PostgresFlexCustomEndpoint))
+ apiClientConfigOptions = append(
+ apiClientConfigOptions,
+ config.WithEndpoint(r.providerData.PostgresFlexCustomEndpoint),
+ )
} else {
apiClientConfigOptions = append(apiClientConfigOptions, config.WithRegion(r.providerData.GetRegion()))
}
@@ -67,7 +125,10 @@ func (r *databaseResource) Configure(
if err != nil {
resp.Diagnostics.AddError(
"Error configuring API client",
- fmt.Sprintf("Configuring client: %v. This is an error related to the provider configuration, not to the resource configuration", err),
+ fmt.Sprintf(
+ "Configuring client: %v. This is an error related to the provider configuration, not to the resource configuration",
+ err,
+ ),
)
return
}
@@ -97,7 +158,7 @@ func (r *databaseResource) Create(ctx context.Context, req resource.CreateReques
}
func (r *databaseResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) {
- var data sqlserverflexalphaGen.DatabaseModel
+ var data resourceModel
// Read Terraform prior state data into the model
resp.Diagnostics.Append(req.State.Get(ctx, &data)...)
@@ -115,7 +176,7 @@ func (r *databaseResource) Read(ctx context.Context, req resource.ReadRequest, r
}
func (r *databaseResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) {
- var data sqlserverflexalphaGen.DatabaseModel
+ var data resourceModel
// Read Terraform plan data into the model
resp.Diagnostics.Append(req.Plan.Get(ctx, &data)...)
@@ -182,36 +243,72 @@ func (r *databaseResource) ModifyPlan(
}
// ImportState imports a resource into the Terraform state on success.
-// The expected format of the resource import identifier is: project_id,zone_id,record_set_id
+// The expected import identifier format is: [project_id],[region],[instance_id],[database_id]
func (r *databaseResource) ImportState(
ctx context.Context,
req resource.ImportStateRequest,
resp *resource.ImportStateResponse,
) {
- idParts := strings.Split(req.ID, core.Separator)
- // Todo: Import logic
- if len(idParts) < 2 || idParts[0] == "" || idParts[1] == "" {
- core.LogAndAddError(
- ctx, &resp.Diagnostics,
- "Error importing database",
- fmt.Sprintf(
- "Expected import identifier with format [project_id],[region],..., got %q",
- req.ID,
- ),
+ ctx = core.InitProviderContext(ctx)
+
+ if req.ID != "" {
+
+ idParts := strings.Split(req.ID, core.Separator)
+
+ if len(idParts) != 4 || idParts[0] == "" || idParts[1] == "" || idParts[2] == "" || idParts[3] == "" {
+ core.LogAndAddError(
+ ctx, &resp.Diagnostics,
+ "Error importing database",
+ fmt.Sprintf(
+ "Expected import identifier with format [project_id],[region],[instance_id],[database_name], got %q",
+ req.ID,
+ ),
+ )
+ return
+ }
+
+		_, err := strconv.ParseInt(idParts[3], 10, 64)
+		if err != nil {
+			core.LogAndAddError(
+				ctx,
+				&resp.Diagnostics,
+				"Error importing database",
+				fmt.Sprintf("Invalid database_id format: %q. It must be a valid integer.", idParts[3]),
+			)
+			return
+		}
+
+		resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), idParts[0])...)
+		resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), idParts[1])...)
+		resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), idParts[2])...)
+		resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("database_name"), idParts[3])...)
+
+ core.LogAndAddWarning(
+ ctx,
+ &resp.Diagnostics,
+ "Sqlserverflexalpha database imported with empty password",
+ "The database password is not imported as it is only available upon creation of a new database. The password field will be empty.",
)
+		tflog.Info(ctx, "Sqlserverflexalpha database state imported")
+		return
+	}
+ // If no ID is provided, attempt to read identity attributes from the import configuration
+ var identityData DatabaseResourceIdentityModel
+ resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...)
+ if resp.Diagnostics.HasError() {
return
}
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), idParts[0])...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), idParts[1])...)
- // ... more ...
+ projectId := identityData.ProjectID.ValueString()
+ region := identityData.Region.ValueString()
+ instanceId := identityData.InstanceID.ValueString()
+ databaseName := identityData.DatabaseName.ValueString()
+
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), projectId)...)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), region)...)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), instanceId)...)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("database_name"), databaseName)...)
- core.LogAndAddWarning(
- ctx,
- &resp.Diagnostics,
- "Sqlserverflexalpha database imported with empty password",
- "The database password is not imported as it is only available upon creation of a new database. The password field will be empty.",
- )
tflog.Info(ctx, "Sqlserverflexalpha database state imported")
}
diff --git a/stackit/internal/services/sqlserverflexalpha/flavor/datasource.go b/stackit/internal/services/sqlserverflexalpha/flavor/datasource.go
index 1deb2beb..a48e7572 100644
--- a/stackit/internal/services/sqlserverflexalpha/flavor/datasource.go
+++ b/stackit/internal/services/sqlserverflexalpha/flavor/datasource.go
@@ -26,6 +26,11 @@ var (
_ datasource.DataSourceWithConfigure = &flavorDataSource{}
)
+// NewFlavorDataSource is a helper function to simplify the provider implementation.
+func NewFlavorDataSource() datasource.DataSource {
+ return &flavorDataSource{}
+}
+
type FlavorModel struct {
ProjectId types.String `tfsdk:"project_id"`
Region types.String `tfsdk:"region"`
@@ -41,11 +46,6 @@ type FlavorModel struct {
StorageClasses types.List `tfsdk:"storage_classes"`
}
-// NewFlavorDataSource is a helper function to simplify the provider implementation.
-func NewFlavorDataSource() datasource.DataSource {
- return &flavorDataSource{}
-}
-
// flavorDataSource is the data source implementation.
type flavorDataSource struct {
client *sqlserverflexalpha.APIClient
@@ -53,12 +53,20 @@ type flavorDataSource struct {
}
// Metadata returns the data source type name.
-func (r *flavorDataSource) Metadata(_ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
+func (r *flavorDataSource) Metadata(
+ _ context.Context,
+ req datasource.MetadataRequest,
+ resp *datasource.MetadataResponse,
+) {
resp.TypeName = req.ProviderTypeName + "_sqlserverflexalpha_flavor"
}
// Configure adds the provider configured client to the data source.
-func (r *flavorDataSource) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) {
+func (r *flavorDataSource) Configure(
+ ctx context.Context,
+ req datasource.ConfigureRequest,
+ resp *datasource.ConfigureResponse,
+) {
var ok bool
r.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics)
if !ok {
@@ -212,11 +220,13 @@ func (r *flavorDataSource) Read(ctx context.Context, req datasource.ReadRequest,
model.MinGb = types.Int64Value(*f.MinGB)
if f.StorageClasses == nil {
- model.StorageClasses = types.ListNull(sqlserverflexalphaGen.StorageClassesType{
- ObjectType: basetypes.ObjectType{
- AttrTypes: sqlserverflexalphaGen.StorageClassesValue{}.AttributeTypes(ctx),
+ model.StorageClasses = types.ListNull(
+ sqlserverflexalphaGen.StorageClassesType{
+ ObjectType: basetypes.ObjectType{
+ AttrTypes: sqlserverflexalphaGen.StorageClassesValue{}.AttributeTypes(ctx),
+ },
},
- })
+ )
} else {
var scList []attr.Value
for _, sc := range *f.StorageClasses {
diff --git a/stackit/internal/services/sqlserverflexalpha/flavors/datasource.go b/stackit/internal/services/sqlserverflexalpha/flavors/datasource.go
index 27609fc5..c1d4de36 100644
--- a/stackit/internal/services/sqlserverflexalpha/flavors/datasource.go
+++ b/stackit/internal/services/sqlserverflexalpha/flavors/datasource.go
@@ -13,8 +13,12 @@ import (
sqlserverflexalphaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/flavors/datasources_gen"
)
+// dataSourceModel maps the data source schema data.
+type dataSourceModel = sqlserverflexalphaGen.FlavorsModel
+
var _ datasource.DataSource = (*flavorsDataSource)(nil)
+// TODO: Use NewFlavorsDataSource when datasource is implemented
func NewFlavorsDataSource() datasource.DataSource {
return &flavorsDataSource{}
}
@@ -24,7 +28,11 @@ type flavorsDataSource struct {
providerData core.ProviderData
}
-func (d *flavorsDataSource) Metadata(_ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
+func (d *flavorsDataSource) Metadata(
+ _ context.Context,
+ req datasource.MetadataRequest,
+ resp *datasource.MetadataResponse,
+) {
resp.TypeName = req.ProviderTypeName + "_sqlserverflexalpha_flavors"
}
@@ -33,7 +41,11 @@ func (d *flavorsDataSource) Schema(ctx context.Context, _ datasource.SchemaReque
}
// Configure adds the provider configured client to the data source.
-func (d *flavorsDataSource) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) {
+func (d *flavorsDataSource) Configure(
+ ctx context.Context,
+ req datasource.ConfigureRequest,
+ resp *datasource.ConfigureResponse,
+) {
var ok bool
d.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics)
if !ok {
@@ -49,7 +61,7 @@ func (d *flavorsDataSource) Configure(ctx context.Context, req datasource.Config
}
func (d *flavorsDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
- var data sqlserverflexalphaGen.FlavorsModel
+ var data dataSourceModel
// Read Terraform configuration data into the model
resp.Diagnostics.Append(req.Config.Get(ctx, &data)...)
diff --git a/stackit/internal/services/sqlserverflexalpha/flavors/datasources_gen/flavors_data_source_gen.go b/stackit/internal/services/sqlserverflexalpha/flavors/datasources_gen/flavors_data_source_gen.go
index 43ac64f5..40f086e2 100644
--- a/stackit/internal/services/sqlserverflexalpha/flavors/datasources_gen/flavors_data_source_gen.go
+++ b/stackit/internal/services/sqlserverflexalpha/flavors/datasources_gen/flavors_data_source_gen.go
@@ -33,7 +33,7 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
Description: "The flavor description.",
MarkdownDescription: "The flavor description.",
},
- "id": schema.StringAttribute{
+ "tf_original_api_id": schema.StringAttribute{
Computed: true,
Description: "The id of the instance flavor.",
MarkdownDescription: "The id of the instance flavor.",
diff --git a/stackit/internal/services/sqlserverflexalpha/instance/datasource.go b/stackit/internal/services/sqlserverflexalpha/instance/datasource.go
index 3f8f787e..0d58140c 100644
--- a/stackit/internal/services/sqlserverflexalpha/instance/datasource.go
+++ b/stackit/internal/services/sqlserverflexalpha/instance/datasource.go
@@ -7,6 +7,8 @@ import (
"fmt"
"net/http"
+ "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
+ "github.com/hashicorp/terraform-plugin-framework/types"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
sqlserverflexalpha "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/instance/datasources_gen"
sqlserverflexalpha2 "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/instance/resources_gen"
@@ -20,6 +22,12 @@ import (
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
)
+// dataSourceModel maps the data source schema data.
+type dataSourceModel struct {
+ sqlserverflexalpha2.InstanceModel
+ TerraformID types.String `tfsdk:"id"`
+}
+
// Ensure the implementation satisfies the expected interfaces.
var (
_ datasource.DataSource = &instanceDataSource{}
@@ -37,12 +45,20 @@ type instanceDataSource struct {
}
// Metadata returns the data source type name.
-func (r *instanceDataSource) Metadata(_ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
+func (r *instanceDataSource) Metadata(
+ _ context.Context,
+ req datasource.MetadataRequest,
+ resp *datasource.MetadataResponse,
+) {
resp.TypeName = req.ProviderTypeName + "_sqlserverflexalpha_instance"
}
// Configure adds the provider configured client to the data source.
-func (r *instanceDataSource) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) {
+func (r *instanceDataSource) Configure(
+ ctx context.Context,
+ req datasource.ConfigureRequest,
+ resp *datasource.ConfigureResponse,
+) {
var ok bool
r.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics)
if !ok {
@@ -59,167 +75,22 @@ func (r *instanceDataSource) Configure(ctx context.Context, req datasource.Confi
// Schema defines the schema for the data source.
func (r *instanceDataSource) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) {
- //descriptions := map[string]string{
- // "main": "SQLServer Flex ALPHA instance resource schema. Must have a `region` specified in the provider configuration.",
- // "id": "Terraform's internal resource ID. It is structured as \"`project_id`,`region`,`instance_id`\".",
- // "instance_id": "ID of the SQLServer Flex instance.",
- // "project_id": "STACKIT project ID to which the instance is associated.",
- // "name": "Instance name.",
- // "access_scope": "The access scope of the instance. (e.g. SNA)",
- // "acl": "The Access Control List (ACL) for the SQLServer Flex instance.",
- // "backup_schedule": `The backup schedule. Should follow the cron scheduling system format (e.g. "0 0 * * *")`,
- // "region": "The resource region. If not defined, the provider region is used.",
- // "encryption": "The encryption block.",
- // "network": "The network block.",
- // "keyring_id": "STACKIT KMS - KeyRing ID of the encryption key to use.",
- // "key_id": "STACKIT KMS - Key ID of the encryption key to use.",
- // "key_version": "STACKIT KMS - Key version to use in the encryption key.",
- // "service:account": "STACKIT KMS - service account to use in the encryption key.",
- // "instance_address": "The returned instance IP address of the SQLServer Flex instance.",
- // "router_address": "The returned router IP address of the SQLServer Flex instance.",
- //}
+ s := sqlserverflexalpha.InstanceDataSourceSchema(ctx)
+ s.Attributes["id"] = schema.StringAttribute{
+ Description: "Terraform's internal resource ID. It is structured as \\\"`project_id`,`region`,`instance_id`\\\".",
+ Computed: true,
+ }
- resp.Schema = sqlserverflexalpha.InstanceDataSourceSchema(ctx)
-
- //resp.Schema = schema.Schema{
- // Description: descriptions["main"],
- // Attributes: map[string]schema.Attribute{
- // "id": schema.StringAttribute{
- // Description: descriptions["id"],
- // Computed: true,
- // },
- // "instance_id": schema.StringAttribute{
- // Description: descriptions["instance_id"],
- // Required: true,
- // Validators: []validator.String{
- // validate.UUID(),
- // validate.NoSeparator(),
- // },
- // },
- // "project_id": schema.StringAttribute{
- // Description: descriptions["project_id"],
- // Required: true,
- // Validators: []validator.String{
- // validate.UUID(),
- // validate.NoSeparator(),
- // },
- // },
- // "name": schema.StringAttribute{
- // Description: descriptions["name"],
- // Computed: true,
- // },
- // "backup_schedule": schema.StringAttribute{
- // Description: descriptions["backup_schedule"],
- // Computed: true,
- // },
- // "is_deletable": schema.BoolAttribute{
- // Description: descriptions["is_deletable"],
- // Computed: true,
- // },
- // "flavor": schema.SingleNestedAttribute{
- // Computed: true,
- // Attributes: map[string]schema.Attribute{
- // "id": schema.StringAttribute{
- // Computed: true,
- // },
- // "description": schema.StringAttribute{
- // Computed: true,
- // },
- // "cpu": schema.Int64Attribute{
- // Computed: true,
- // },
- // "ram": schema.Int64Attribute{
- // Computed: true,
- // },
- // "node_type": schema.StringAttribute{
- // Computed: true,
- // },
- // },
- // },
- // "replicas": schema.Int64Attribute{
- // Computed: true,
- // },
- // "storage": schema.SingleNestedAttribute{
- // Computed: true,
- // Attributes: map[string]schema.Attribute{
- // "class": schema.StringAttribute{
- // Computed: true,
- // },
- // "size": schema.Int64Attribute{
- // Computed: true,
- // },
- // },
- // },
- // "version": schema.StringAttribute{
- // Computed: true,
- // },
- // "status": schema.StringAttribute{
- // Computed: true,
- // },
- // "edition": schema.StringAttribute{
- // Computed: true,
- // },
- // "retention_days": schema.Int64Attribute{
- // Computed: true,
- // },
- // "region": schema.StringAttribute{
- // // the region cannot be found, so it has to be passed
- // Optional: true,
- // Description: descriptions["region"],
- // },
- // "encryption": schema.SingleNestedAttribute{
- // Computed: true,
- // Attributes: map[string]schema.Attribute{
- // "key_id": schema.StringAttribute{
- // Description: descriptions["key_id"],
- // Computed: true,
- // },
- // "key_version": schema.StringAttribute{
- // Description: descriptions["key_version"],
- // Computed: true,
- // },
- // "keyring_id": schema.StringAttribute{
- // Description: descriptions["keyring_id"],
- // Computed: true,
- // },
- // "service_account": schema.StringAttribute{
- // Description: descriptions["service_account"],
- // Computed: true,
- // },
- // },
- // Description: descriptions["encryption"],
- // },
- // "network": schema.SingleNestedAttribute{
- // Computed: true,
- // Attributes: map[string]schema.Attribute{
- // "access_scope": schema.StringAttribute{
- // Description: descriptions["access_scope"],
- // Computed: true,
- // },
- // "instance_address": schema.StringAttribute{
- // Description: descriptions["instance_address"],
- // Computed: true,
- // },
- // "router_address": schema.StringAttribute{
- // Description: descriptions["router_address"],
- // Computed: true,
- // },
- // "acl": schema.ListAttribute{
- // Description: descriptions["acl"],
- // ElementType: types.StringType,
- // Computed: true,
- // },
- // },
- // Description: descriptions["network"],
- // },
- // },
- //}
+ resp.Schema = s
}
// Read refreshes the Terraform state with the latest data.
-func (r *instanceDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { // nolint:gocritic // function signature required by Terraform
- //var model sqlserverflexalpha2.InstanceModel
- var model sqlserverflexalpha2.InstanceModel
+func (r *instanceDataSource) Read(
+ ctx context.Context,
+ req datasource.ReadRequest,
+ resp *datasource.ReadResponse,
+) { // nolint:gocritic // function signature required by Terraform
+ var model dataSourceModel
diags := req.Config.Get(ctx, &model)
resp.Diagnostics.Append(diags...)
if resp.Diagnostics.HasError() {
@@ -279,10 +150,15 @@ func (r *instanceDataSource) Read(ctx context.Context, req datasource.ReadReques
// }
//}
- err = mapResponseToModel(ctx, instanceResp, &model, resp.Diagnostics)
+ err = mapFields(ctx, instanceResp, &model, resp.Diagnostics)
//err = mapFields(ctx, instanceResp, &model, storage, encryption, network, region)
if err != nil {
- core.LogAndAddError(ctx, &resp.Diagnostics, "Error reading instance", fmt.Sprintf("Processing API payload: %v", err))
+ core.LogAndAddError(
+ ctx,
+ &resp.Diagnostics,
+ "Error reading instance",
+ fmt.Sprintf("Processing API payload: %v", err),
+ )
return
}
// Set refreshed state
diff --git a/stackit/internal/services/sqlserverflexalpha/instance/datasources_gen/instance_data_source_gen.go b/stackit/internal/services/sqlserverflexalpha/instance/datasources_gen/instance_data_source_gen.go
index dcf7f6dd..5880a392 100644
--- a/stackit/internal/services/sqlserverflexalpha/instance/datasources_gen/instance_data_source_gen.go
+++ b/stackit/internal/services/sqlserverflexalpha/instance/datasources_gen/instance_data_source_gen.go
@@ -65,7 +65,7 @@ func InstanceDataSourceSchema(ctx context.Context) schema.Schema {
Description: "The id of the instance flavor.",
MarkdownDescription: "The id of the instance flavor.",
},
- "id": schema.StringAttribute{
+ "tf_original_api_id": schema.StringAttribute{
Computed: true,
Description: "The ID of the instance.",
MarkdownDescription: "The ID of the instance.",
@@ -178,7 +178,7 @@ type InstanceModel struct {
Edition types.String `tfsdk:"edition"`
Encryption EncryptionValue `tfsdk:"encryption"`
FlavorId types.String `tfsdk:"flavor_id"`
- Id types.String `tfsdk:"id"`
+ Id types.String `tfsdk:"tf_original_api_id"`
InstanceId types.String `tfsdk:"instance_id"`
IsDeletable types.Bool `tfsdk:"is_deletable"`
Name types.String `tfsdk:"name"`
diff --git a/stackit/internal/services/sqlserverflexalpha/instance/functions.go b/stackit/internal/services/sqlserverflexalpha/instance/functions.go
index 783d95e1..77effc6c 100644
--- a/stackit/internal/services/sqlserverflexalpha/instance/functions.go
+++ b/stackit/internal/services/sqlserverflexalpha/instance/functions.go
@@ -14,26 +14,21 @@ import (
sqlserverflexResGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/instance/resources_gen"
)
-func mapResponseToModel(
+// instanceModel is a type constraint for models that can be mapped from a GetInstanceResponse.
+type instanceModel interface {
+ *dataSourceModel | *resourceModel
+}
+
+func mapFields[T instanceModel](
ctx context.Context,
resp *sqlserverflex.GetInstanceResponse,
- m *sqlserverflexResGen.InstanceModel,
+ m T,
tfDiags diag.Diagnostics,
) error {
- m.BackupSchedule = types.StringValue(resp.GetBackupSchedule())
- m.Edition = types.StringValue(string(resp.GetEdition()))
- m.Encryption = handleEncryption(m, resp)
- m.FlavorId = types.StringValue(resp.GetFlavorId())
- m.Id = types.StringValue(resp.GetId())
- m.InstanceId = types.StringValue(resp.GetId())
- m.IsDeletable = types.BoolValue(resp.GetIsDeletable())
- m.Name = types.StringValue(resp.GetName())
netAcl, diags := types.ListValueFrom(ctx, types.StringType, resp.Network.GetAcl())
tfDiags.Append(diags...)
if diags.HasError() {
- return fmt.Errorf(
- "error converting network acl response value",
- )
+ return fmt.Errorf("error converting network acl response value")
}
net, diags := sqlserverflexResGen.NewNetworkValue(
sqlserverflexResGen.NetworkValue{}.AttributeTypes(ctx),
@@ -46,22 +41,8 @@ func mapResponseToModel(
)
tfDiags.Append(diags...)
if diags.HasError() {
- return fmt.Errorf(
- "error converting network response value",
- "access_scope",
- types.StringValue(string(resp.Network.GetAccessScope())),
- "acl",
- netAcl,
- "instance_address",
- types.StringValue(resp.Network.GetInstanceAddress()),
- "router_address",
- types.StringValue(resp.Network.GetRouterAddress()),
- )
+ return fmt.Errorf("error converting network response value")
}
- m.Network = net
- m.Replicas = types.Int64Value(int64(resp.GetReplicas()))
- m.RetentionDays = types.Int64Value(resp.GetRetentionDays())
- m.Status = types.StringValue(string(resp.GetStatus()))
stor, diags := sqlserverflexResGen.NewStorageValue(
sqlserverflexResGen.StorageValue{}.AttributeTypes(ctx),
@@ -74,14 +55,47 @@ func mapResponseToModel(
if diags.HasError() {
return fmt.Errorf("error converting storage response value")
}
- m.Storage = stor
- m.Version = types.StringValue(string(resp.GetVersion()))
+ // The interface conversion is safe due to the type constraint.
+ model := any(m)
+
+ if rm, ok := model.(*resourceModel); ok {
+ rm.BackupSchedule = types.StringValue(resp.GetBackupSchedule())
+ rm.Edition = types.StringValue(string(resp.GetEdition()))
+ rm.Encryption = handleEncryption(rm.Encryption, resp)
+ rm.FlavorId = types.StringValue(resp.GetFlavorId())
+ rm.Id = types.StringValue(resp.GetId())
+ rm.InstanceId = types.StringValue(resp.GetId())
+ rm.IsDeletable = types.BoolValue(resp.GetIsDeletable())
+ rm.Name = types.StringValue(resp.GetName())
+ rm.Network = net
+ rm.Replicas = types.Int64Value(int64(resp.GetReplicas()))
+ rm.RetentionDays = types.Int64Value(resp.GetRetentionDays())
+ rm.Status = types.StringValue(string(resp.GetStatus()))
+ rm.Storage = stor
+ rm.Version = types.StringValue(string(resp.GetVersion()))
+ } else if dm, ok := model.(*dataSourceModel); ok {
+ dm.BackupSchedule = types.StringValue(resp.GetBackupSchedule())
+ dm.Edition = types.StringValue(string(resp.GetEdition()))
+ dm.Encryption = handleEncryption(dm.Encryption, resp)
+ dm.FlavorId = types.StringValue(resp.GetFlavorId())
+ dm.Id = types.StringValue(resp.GetId())
+ dm.InstanceId = types.StringValue(resp.GetId())
+ dm.IsDeletable = types.BoolValue(resp.GetIsDeletable())
+ dm.Name = types.StringValue(resp.GetName())
+ dm.Network = net
+ dm.Replicas = types.Int64Value(int64(resp.GetReplicas()))
+ dm.RetentionDays = types.Int64Value(resp.GetRetentionDays())
+ dm.Status = types.StringValue(string(resp.GetStatus()))
+ dm.Storage = stor
+ dm.Version = types.StringValue(string(resp.GetVersion()))
+ }
+
return nil
}
func handleEncryption(
- m *sqlserverflexResGen.InstanceModel,
+ encryptionValue sqlserverflexResGen.EncryptionValue,
resp *sqlserverflex.GetInstanceResponse,
) sqlserverflexResGen.EncryptionValue {
if !resp.HasEncryption() ||
@@ -91,10 +105,10 @@ func handleEncryption(
resp.Encryption.KekKeyVersion == nil ||
resp.Encryption.ServiceAccount == nil {
- if m.Encryption.IsNull() || m.Encryption.IsUnknown() {
+ if encryptionValue.IsNull() || encryptionValue.IsUnknown() {
return sqlserverflexResGen.NewEncryptionValueNull()
}
- return m.Encryption
+ return encryptionValue
}
enc := sqlserverflexResGen.NewEncryptionValueNull()
diff --git a/stackit/internal/services/sqlserverflexalpha/instance/resource.go b/stackit/internal/services/sqlserverflexalpha/instance/resource.go
index 9257c8df..76c73639 100644
--- a/stackit/internal/services/sqlserverflexalpha/instance/resource.go
+++ b/stackit/internal/services/sqlserverflexalpha/instance/resource.go
@@ -11,7 +11,6 @@ import (
"time"
"github.com/hashicorp/terraform-plugin-framework/resource/identityschema"
- postgresflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/utils"
sqlserverflexalpha2 "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/instance/resources_gen"
sqlserverflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/utils"
@@ -37,23 +36,26 @@ var (
_ resource.ResourceWithIdentity = &instanceResource{}
)
+// NewInstanceResource is a helper function to simplify the provider implementation.
+func NewInstanceResource() resource.Resource {
+ return &instanceResource{}
+}
+
//nolint:unused // TODO: remove if not needed later
var validNodeTypes []string = []string{
"Single",
"Replica",
}
+// resourceModel describes the resource data model.
+type resourceModel = sqlserverflexalpha2.InstanceModel
+
type InstanceResourceIdentityModel struct {
ProjectID types.String `tfsdk:"project_id"`
Region types.String `tfsdk:"region"`
InstanceID types.String `tfsdk:"instance_id"`
}
-// NewInstanceResource is a helper function to simplify the provider implementation.
-func NewInstanceResource() resource.Resource {
- return &instanceResource{}
-}
-
// instanceResource is the resource implementation.
type instanceResource struct {
client *sqlserverflexalpha.APIClient
@@ -140,270 +142,28 @@ var modifiersFileByte []byte
// Schema defines the schema for the resource.
func (r *instanceResource) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) {
- //descriptions := map[string]string{
- // "main": "SQLServer Flex ALPHA instance resource schema. Must have a `region` specified in the provider configuration.",
- // "id": "Terraform's internal resource ID. It is structured as \"`project_id`,`region`,`instance_id`\".",
- // "instance_id": "ID of the SQLServer Flex instance.",
- // "project_id": "STACKIT project ID to which the instance is associated.",
- // "name": "Instance name.",
- // "access_scope": "The access scope of the instance. (SNA | PUBLIC)",
- // "flavor_id": "The flavor ID of the instance.",
- // "acl": "The Access Control List (ACL) for the SQLServer Flex instance.",
- // "backup_schedule": `The backup schedule. Should follow the cron scheduling system format (e.g. "0 0 * * *")`,
- // "region": "The resource region. If not defined, the provider region is used.",
- // "encryption": "The encryption block.",
- // "replicas": "The number of replicas of the SQLServer Flex instance.",
- // "network": "The network block.",
- // "keyring_id": "STACKIT KMS - KeyRing ID of the encryption key to use.",
- // "key_id": "STACKIT KMS - Key ID of the encryption key to use.",
- // "key_version": "STACKIT KMS - Key version to use in the encryption key.",
- // "service:account": "STACKIT KMS - service account to use in the encryption key.",
- // "instance_address": "The returned instance IP address of the SQLServer Flex instance.",
- // "router_address": "The returned router IP address of the SQLServer Flex instance.",
- //}
schema := sqlserverflexalpha2.InstanceResourceSchema(ctx)
- fields, err := postgresflexUtils.ReadModifiersConfig(modifiersFileByte)
+ fields, err := utils.ReadModifiersConfig(modifiersFileByte)
if err != nil {
resp.Diagnostics.AddError("error during read modifiers config file", err.Error())
return
}
- err = postgresflexUtils.AddPlanModifiersToResourceSchema(fields, &schema)
+ err = utils.AddPlanModifiersToResourceSchema(fields, &schema)
if err != nil {
resp.Diagnostics.AddError("error adding plan modifiers", err.Error())
return
}
resp.Schema = schema
-
- //resp.Schema = schema.Schema{
- // Description: descriptions["main"],
- // Attributes: map[string]schema.Attribute{
- // "id": schema.StringAttribute{
- // Description: descriptions["id"],
- // Computed: true,
- // PlanModifiers: []planmodifier.String{
- // stringplanmodifier.UseStateForUnknown(),
- // },
- // },
- // "instance_id": schema.StringAttribute{
- // Description: descriptions["instance_id"],
- // Computed: true,
- // PlanModifiers: []planmodifier.String{
- // stringplanmodifier.UseStateForUnknown(),
- // },
- // Validators: []validator.String{
- // validate.UUID(),
- // validate.NoSeparator(),
- // },
- // },
- // "project_id": schema.StringAttribute{
- // Description: descriptions["project_id"],
- // Required: true,
- // PlanModifiers: []planmodifier.String{
- // stringplanmodifier.RequiresReplace(),
- // },
- // Validators: []validator.String{
- // validate.UUID(),
- // validate.NoSeparator(),
- // },
- // },
- // "name": schema.StringAttribute{
- // Description: descriptions["name"],
- // Required: true,
- // Validators: []validator.String{
- // stringvalidator.LengthAtLeast(1),
- // stringvalidator.RegexMatches(
- // regexp.MustCompile("^[a-z]([-a-z0-9]*[a-z0-9])?$"),
- // "must start with a letter, must have lower case letters, numbers or hyphens, and no hyphen at the end",
- // ),
- // },
- // },
- // "backup_schedule": schema.StringAttribute{
- // Description: descriptions["backup_schedule"],
- // Optional: true,
- // Computed: true,
- // PlanModifiers: []planmodifier.String{
- // stringplanmodifier.UseStateForUnknown(),
- // },
- // },
- // "is_deletable": schema.BoolAttribute{
- // Description: descriptions["is_deletable"],
- // Optional: true,
- // Computed: true,
- // PlanModifiers: []planmodifier.Bool{
- // boolplanmodifier.UseStateForUnknown(),
- // },
- // },
- // "flavor_id": schema.StringAttribute{
- // PlanModifiers: []planmodifier.String{
- // stringplanmodifier.RequiresReplace(),
- // stringplanmodifier.UseStateForUnknown(),
- // },
- // Required: true,
- // },
- // "replicas": schema.Int64Attribute{
- // Computed: true,
- // PlanModifiers: []planmodifier.Int64{
- // int64planmodifier.UseStateForUnknown(),
- // },
- // },
- // "storage": schema.SingleNestedAttribute{
- // Optional: true,
- // Computed: true,
- // PlanModifiers: []planmodifier.Object{
- // objectplanmodifier.UseStateForUnknown(),
- // },
- // Attributes: map[string]schema.Attribute{
- // "class": schema.StringAttribute{
- // Optional: true,
- // Computed: true,
- // PlanModifiers: []planmodifier.String{
- // stringplanmodifier.RequiresReplace(),
- // stringplanmodifier.UseStateForUnknown(),
- // },
- // },
- // "size": schema.Int64Attribute{
- // Optional: true,
- // Computed: true,
- // PlanModifiers: []planmodifier.Int64{
- // int64planmodifier.UseStateForUnknown(),
- // },
- // },
- // },
- // },
- // "version": schema.StringAttribute{
- // Optional: true,
- // Computed: true,
- // PlanModifiers: []planmodifier.String{
- // stringplanmodifier.RequiresReplace(),
- // stringplanmodifier.UseStateForUnknown(),
- // },
- // },
- // "edition": schema.StringAttribute{
- // Computed: true,
- // PlanModifiers: []planmodifier.String{
- // stringplanmodifier.RequiresReplace(),
- // stringplanmodifier.UseStateForUnknown(),
- // },
- // },
- // "retention_days": schema.Int64Attribute{
- // Optional: true,
- // Computed: true,
- // PlanModifiers: []planmodifier.Int64{
- // int64planmodifier.UseStateForUnknown(),
- // },
- // },
- // "region": schema.StringAttribute{
- // Optional: true,
- // // must be computed to allow for storing the override value from the provider
- // Computed: true,
- // Description: descriptions["region"],
- // PlanModifiers: []planmodifier.String{
- // stringplanmodifier.RequiresReplace(),
- // },
- // },
- // "status": schema.StringAttribute{
- // Optional: true,
- // // must be computed to allow for storing the override value from the provider
- // Computed: true,
- // Description: descriptions["status"],
- // },
- // "encryption": schema.SingleNestedAttribute{
- // Optional: true,
- // PlanModifiers: []planmodifier.Object{
- // objectplanmodifier.RequiresReplace(),
- // objectplanmodifier.UseStateForUnknown(),
- // },
- // Attributes: map[string]schema.Attribute{
- // "key_id": schema.StringAttribute{
- // Description: descriptions["key_id"],
- // Required: true,
- // PlanModifiers: []planmodifier.String{
- // stringplanmodifier.RequiresReplace(),
- // },
- // Validators: []validator.String{
- // validate.NoSeparator(),
- // },
- // },
- // "key_version": schema.StringAttribute{
- // Description: descriptions["key_version"],
- // Required: true,
- // PlanModifiers: []planmodifier.String{
- // stringplanmodifier.RequiresReplace(),
- // },
- // Validators: []validator.String{
- // validate.NoSeparator(),
- // },
- // },
- // "keyring_id": schema.StringAttribute{
- // Description: descriptions["keyring_id"],
- // Required: true,
- // PlanModifiers: []planmodifier.String{
- // stringplanmodifier.RequiresReplace(),
- // },
- // Validators: []validator.String{
- // validate.NoSeparator(),
- // },
- // },
- // "service_account": schema.StringAttribute{
- // Description: descriptions["service_account"],
- // Required: true,
- // PlanModifiers: []planmodifier.String{
- // stringplanmodifier.RequiresReplace(),
- // },
- // Validators: []validator.String{
- // validate.NoSeparator(),
- // },
- // },
- // },
- // Description: descriptions["encryption"],
- // },
- // "network": schema.SingleNestedAttribute{
- // Required: true,
- // Attributes: map[string]schema.Attribute{
- // "access_scope": schema.StringAttribute{
- // Description: descriptions["access_scope"],
- // Required: true,
- // PlanModifiers: []planmodifier.String{
- // stringplanmodifier.RequiresReplace(),
- // stringplanmodifier.UseStateForUnknown(),
- // },
- // Validators: []validator.String{
- // validate.NoSeparator(),
- // },
- // },
- // "acl": schema.ListAttribute{
- // Description: descriptions["acl"],
- // ElementType: types.StringType,
- // Required: true,
- // PlanModifiers: []planmodifier.List{
- // listplanmodifier.UseStateForUnknown(),
- // },
- // },
- // "instance_address": schema.StringAttribute{
- // Description: descriptions["instance_address"],
- // Computed: true,
- // PlanModifiers: []planmodifier.String{
- // stringplanmodifier.UseStateForUnknown(),
- // },
- // },
- // "router_address": schema.StringAttribute{
- // Description: descriptions["router_address"],
- // Computed: true,
- // PlanModifiers: []planmodifier.String{
- // stringplanmodifier.UseStateForUnknown(),
- // },
- // },
- // },
- // Description: descriptions["network"],
- // },
- // },
- //}
}
-func (r *instanceResource) IdentitySchema(_ context.Context, _ resource.IdentitySchemaRequest, resp *resource.IdentitySchemaResponse) {
+func (r *instanceResource) IdentitySchema(
+ _ context.Context,
+ _ resource.IdentitySchemaRequest,
+ resp *resource.IdentitySchemaResponse,
+) {
resp.IdentitySchema = identityschema.Schema{
Attributes: map[string]identityschema.Attribute{
"project_id": identityschema.StringAttribute{
@@ -425,7 +185,7 @@ func (r *instanceResource) Create(
req resource.CreateRequest,
resp *resource.CreateResponse,
) { // nolint:gocritic // function signature required by Terraform
- var model sqlserverflexalpha2.InstanceModel
+ var model resourceModel
diags := req.Plan.Get(ctx, &model)
resp.Diagnostics.Append(diags...)
if resp.Diagnostics.HasError() {
@@ -528,7 +288,7 @@ func (r *instanceResource) Create(
// Map response body to schema
// err = mapFields(ctx, waitResp, &model, storage, encryption, network, region)
- err = mapResponseToModel(ctx, waitResp, &model, resp.Diagnostics)
+ err = mapFields(ctx, waitResp, &model, resp.Diagnostics)
if err != nil {
core.LogAndAddError(
ctx,
@@ -554,7 +314,7 @@ func (r *instanceResource) Read(
req resource.ReadRequest,
resp *resource.ReadResponse,
) { // nolint:gocritic // function signature required by Terraform
- var model sqlserverflexalpha2.InstanceModel
+ var model resourceModel
diags := req.State.Get(ctx, &model)
resp.Diagnostics.Append(diags...)
if resp.Diagnostics.HasError() {
@@ -592,7 +352,7 @@ func (r *instanceResource) Read(
ctx = core.LogResponse(ctx)
// Map response body to schema
- err = mapResponseToModel(ctx, instanceResp, &model, resp.Diagnostics)
+ err = mapFields(ctx, instanceResp, &model, resp.Diagnostics)
if err != nil {
core.LogAndAddError(
ctx,
@@ -629,7 +389,7 @@ func (r *instanceResource) Update(
resp *resource.UpdateResponse,
) { // nolint:gocritic // function signature required by Terraform
// Retrieve values from plan
- var model sqlserverflexalpha2.InstanceModel
+ var model resourceModel
diags := req.Plan.Get(ctx, &model)
resp.Diagnostics.Append(diags...)
if resp.Diagnostics.HasError() {
@@ -683,7 +443,7 @@ func (r *instanceResource) Update(
}
// Map response body to schema
- err = mapResponseToModel(ctx, waitResp, &model, resp.Diagnostics)
+ err = mapFields(ctx, waitResp, &model, resp.Diagnostics)
// err = mapFields(ctx, waitResp, &model, storage, encryption, network, region)
if err != nil {
core.LogAndAddError(
@@ -709,7 +469,7 @@ func (r *instanceResource) Delete(
resp *resource.DeleteResponse,
) { // nolint:gocritic // function signature required by Terraform
// Retrieve values from state
- var model sqlserverflexalpha2.InstanceModel
+ var model resourceModel
diags := req.State.Get(ctx, &model)
resp.Diagnostics.Append(diags...)
if resp.Diagnostics.HasError() {
@@ -754,20 +514,41 @@ func (r *instanceResource) ImportState(
req resource.ImportStateRequest,
resp *resource.ImportStateResponse,
) {
- // TODO
- idParts := strings.Split(req.ID, core.Separator)
+ if req.ID != "" {
+ idParts := strings.Split(req.ID, core.Separator)
- if len(idParts) != 3 || idParts[0] == "" || idParts[1] == "" || idParts[2] == "" {
- core.LogAndAddError(
- ctx, &resp.Diagnostics,
- "Error importing instance",
- fmt.Sprintf("Expected import identifier with format: [project_id],[region],[instance_id] Got: %q", req.ID),
- )
+ if len(idParts) != 3 || idParts[0] == "" || idParts[1] == "" || idParts[2] == "" {
+ core.LogAndAddError(
+ ctx, &resp.Diagnostics,
+ "Error importing instance",
+ fmt.Sprintf(
+ "Expected import identifier with format: [project_id],[region],[instance_id] Got: %q",
+ req.ID,
+ ),
+ )
+ return
+ }
+
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), idParts[0])...)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), idParts[1])...)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), idParts[2])...)
return
}
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), idParts[0])...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), idParts[1])...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), idParts[2])...)
+ // If no ID is provided, fall back to reading the attributes from the resource identity
+ var identityData InstanceResourceIdentityModel
+ resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ projectId := identityData.ProjectID.ValueString()
+ region := identityData.Region.ValueString()
+ instanceId := identityData.InstanceID.ValueString()
+
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), projectId)...)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), region)...)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), instanceId)...)
+
tflog.Info(ctx, "SQLServer Flex instance state imported")
}
diff --git a/stackit/internal/services/sqlserverflexalpha/user/datasource.go b/stackit/internal/services/sqlserverflexalpha/user/datasource.go
index 9b083db0..282a713c 100644
--- a/stackit/internal/services/sqlserverflexalpha/user/datasource.go
+++ b/stackit/internal/services/sqlserverflexalpha/user/datasource.go
@@ -28,7 +28,12 @@ var (
_ datasource.DataSource = &userDataSource{}
)
-type DataSourceModel struct {
+// NewUserDataSource is a helper function to simplify the provider implementation.
+func NewUserDataSource() datasource.DataSource {
+ return &userDataSource{}
+}
+
+type dataSourceModel struct {
Id types.String `tfsdk:"id"` // needed by TF
UserId types.Int64 `tfsdk:"user_id"`
InstanceId types.String `tfsdk:"instance_id"`
@@ -42,11 +47,6 @@ type DataSourceModel struct {
DefaultDatabase types.String `tfsdk:"default_database"`
}
-// NewUserDataSource is a helper function to simplify the provider implementation.
-func NewUserDataSource() datasource.DataSource {
- return &userDataSource{}
-}
-
// userDataSource is the data source implementation.
type userDataSource struct {
client *sqlserverflexalpha.APIClient
@@ -164,7 +164,7 @@ func (r *userDataSource) Read(
req datasource.ReadRequest,
resp *datasource.ReadResponse,
) { // nolint:gocritic // function signature required by Terraform
- var model DataSourceModel
+ var model dataSourceModel
diags := req.Config.Get(ctx, &model)
resp.Diagnostics.Append(diags...)
if resp.Diagnostics.HasError() {
@@ -226,7 +226,7 @@ func (r *userDataSource) Read(
tflog.Info(ctx, "SQLServer Flex instance read")
}
-func mapDataSourceFields(userResp *sqlserverflexalpha.GetUserResponse, model *DataSourceModel, region string) error {
+func mapDataSourceFields(userResp *sqlserverflexalpha.GetUserResponse, model *dataSourceModel, region string) error {
if userResp == nil {
return fmt.Errorf("response is nil")
}
diff --git a/stackit/internal/services/sqlserverflexalpha/user/datasource_test.go b/stackit/internal/services/sqlserverflexalpha/user/datasource_test.go
index b98c2e53..bd1fa093 100644
--- a/stackit/internal/services/sqlserverflexalpha/user/datasource_test.go
+++ b/stackit/internal/services/sqlserverflexalpha/user/datasource_test.go
@@ -16,14 +16,14 @@ func TestMapDataSourceFields(t *testing.T) {
description string
input *sqlserverflexalpha.GetUserResponse
region string
- expected DataSourceModel
+ expected dataSourceModel
isValid bool
}{
{
"default_values",
&sqlserverflexalpha.GetUserResponse{},
testRegion,
- DataSourceModel{
+ dataSourceModel{
Id: types.StringValue("pid,region,iid,1"),
UserId: types.Int64Value(1),
InstanceId: types.StringValue("iid"),
@@ -54,7 +54,7 @@ func TestMapDataSourceFields(t *testing.T) {
DefaultDatabase: utils.Ptr("default_db"),
},
testRegion,
- DataSourceModel{
+ dataSourceModel{
Id: types.StringValue("pid,region,iid,1"),
UserId: types.Int64Value(1),
InstanceId: types.StringValue("iid"),
@@ -85,7 +85,7 @@ func TestMapDataSourceFields(t *testing.T) {
Port: utils.Ptr(int64(2123456789)),
},
testRegion,
- DataSourceModel{
+ dataSourceModel{
Id: types.StringValue("pid,region,iid,1"),
UserId: types.Int64Value(1),
InstanceId: types.StringValue("iid"),
@@ -102,28 +102,28 @@ func TestMapDataSourceFields(t *testing.T) {
"nil_response",
nil,
testRegion,
- DataSourceModel{},
+ dataSourceModel{},
false,
},
{
"nil_response_2",
&sqlserverflexalpha.GetUserResponse{},
testRegion,
- DataSourceModel{},
+ dataSourceModel{},
false,
},
{
"no_resource_id",
&sqlserverflexalpha.GetUserResponse{},
testRegion,
- DataSourceModel{},
+ dataSourceModel{},
false,
},
}
for _, tt := range tests {
t.Run(
tt.description, func(t *testing.T) {
- state := &DataSourceModel{
+ state := &dataSourceModel{
ProjectId: tt.expected.ProjectId,
InstanceId: tt.expected.InstanceId,
UserId: tt.expected.UserId,
diff --git a/stackit/internal/services/sqlserverflexalpha/user/datasources_gen/user_data_source_gen.go b/stackit/internal/services/sqlserverflexalpha/user/datasources_gen/user_data_source_gen.go
index 3d252237..329469ea 100644
--- a/stackit/internal/services/sqlserverflexalpha/user/datasources_gen/user_data_source_gen.go
+++ b/stackit/internal/services/sqlserverflexalpha/user/datasources_gen/user_data_source_gen.go
@@ -98,7 +98,7 @@ func UserDataSourceSchema(ctx context.Context) schema.Schema {
"users": schema.ListNestedAttribute{
NestedObject: schema.NestedAttributeObject{
Attributes: map[string]schema.Attribute{
- "id": schema.Int64Attribute{
+ "tf_original_api_id": schema.Int64Attribute{
Computed: true,
Description: "The ID of the user.",
MarkdownDescription: "The ID of the user.",
diff --git a/stackit/internal/services/sqlserverflexalpha/user/planModifiers.yaml b/stackit/internal/services/sqlserverflexalpha/user/planModifiers.yaml
new file mode 100644
index 00000000..b01aae98
--- /dev/null
+++ b/stackit/internal/services/sqlserverflexalpha/user/planModifiers.yaml
@@ -0,0 +1,58 @@
+fields:
+ - name: 'id'
+ modifiers:
+ - 'UseStateForUnknown'
+
+ - name: 'user_id'
+ modifiers:
+ - 'UseStateForUnknown'
+
+ - name: 'instance_id'
+ validators:
+ - 'validate.NoSeparator'
+ - 'validate.UUID'
+ modifiers:
+ - 'RequiresReplace'
+
+ - name: 'project_id'
+ validators:
+ - 'validate.NoSeparator'
+ - 'validate.UUID'
+ modifiers:
+ - 'RequiresReplace'
+
+ - name: 'username'
+ modifiers:
+ - 'RequiresReplace'
+
+ - name: 'roles'
+ modifiers:
+ - 'RequiresReplace'
+
+ - name: 'password'
+ modifiers:
+ - 'UseStateForUnknown'
+
+ - name: 'host'
+ modifiers:
+ - 'UseStateForUnknown'
+
+ - name: 'port'
+ modifiers:
+ - 'UseStateForUnknown'
+
+ - name: 'region'
+ modifiers:
+ - 'RequiresReplace'
+
+ - name: 'status'
+ modifiers:
+ - 'UseStateForUnknown'
+
+ - name: 'uri'
+ modifiers:
+ - 'UseStateForUnknown'
+
+ - name: 'default_database'
+ modifiers:
+ - 'UseStateForUnknown'
diff --git a/stackit/internal/services/sqlserverflexalpha/user/resource.go b/stackit/internal/services/sqlserverflexalpha/user/resource.go
index c5cea986..41e6e1c5 100644
--- a/stackit/internal/services/sqlserverflexalpha/user/resource.go
+++ b/stackit/internal/services/sqlserverflexalpha/user/resource.go
@@ -2,33 +2,27 @@ package sqlserverflexalpha
import (
"context"
+ _ "embed"
"errors"
"fmt"
"net/http"
"strconv"
"strings"
- "github.com/hashicorp/terraform-plugin-framework/resource/schema/int64planmodifier"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha"
+ sqlserverflexalphagen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/user/resources_gen"
sqlserverflexalphaUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/utils"
sqlserverflexalphaWait "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/wait/sqlserverflexalpha"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
- "github.com/hashicorp/terraform-plugin-log/tflog"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/validate"
-
"github.com/hashicorp/terraform-plugin-framework/attr"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
- "github.com/hashicorp/terraform-plugin-framework/resource/schema"
- "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/resource/schema/setplanmodifier"
- "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
+ "github.com/hashicorp/terraform-plugin-log/tflog"
"github.com/stackitcloud/stackit-sdk-go/core/oapierror"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
)
// Ensure the implementation satisfies the expected interfaces.
@@ -39,26 +33,22 @@ var (
_ resource.ResourceWithModifyPlan = &userResource{}
)
-type Model struct {
- Id types.String `tfsdk:"id"` // needed by TF
- UserId types.Int64 `tfsdk:"user_id"`
- InstanceId types.String `tfsdk:"instance_id"`
- ProjectId types.String `tfsdk:"project_id"`
- Username types.String `tfsdk:"username"`
- Roles types.Set `tfsdk:"roles"`
- Password types.String `tfsdk:"password"`
- Host types.String `tfsdk:"host"`
- Port types.Int64 `tfsdk:"port"`
- Region types.String `tfsdk:"region"`
- Status types.String `tfsdk:"status"`
- DefaultDatabase types.String `tfsdk:"default_database"`
-}
-
// NewUserResource is a helper function to simplify the provider implementation.
func NewUserResource() resource.Resource {
return &userResource{}
}
+// resourceModel describes the resource data model.
+type resourceModel = sqlserverflexalphagen.UserModel
+
+// UserResourceIdentityModel describes the resource's identity attributes.
+type UserResourceIdentityModel struct {
+ ProjectID types.String `tfsdk:"project_id"`
+ Region types.String `tfsdk:"region"`
+ InstanceID types.String `tfsdk:"instance_id"`
+ UserID types.Int64 `tfsdk:"user_id"`
+}
+
// userResource is the resource implementation.
type userResource struct {
client *sqlserverflexalpha.APIClient
@@ -93,7 +83,7 @@ func (r *userResource) ModifyPlan(
req resource.ModifyPlanRequest,
resp *resource.ModifyPlanResponse,
) { // nolint:gocritic // function signature required by Terraform
- var configModel Model
+ var configModel resourceModel
// skip initial empty configuration to avoid follow-up errors
if req.Config.Raw.IsNull() {
return
@@ -103,7 +93,7 @@ func (r *userResource) ModifyPlan(
return
}
- var planModel Model
+ var planModel resourceModel
resp.Diagnostics.Append(req.Plan.Get(ctx, &planModel)...)
if resp.Diagnostics.HasError() {
return
@@ -120,107 +110,25 @@ func (r *userResource) ModifyPlan(
}
}
+//go:embed planModifiers.yaml
+var modifiersFileByte []byte
+
// Schema defines the schema for the resource.
-func (r *userResource) Schema(_ context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) {
- descriptions := map[string]string{
- "main": "SQLServer Flex user resource schema. Must have a `region` specified in the provider configuration.",
- "id": "Terraform's internal resource ID. It is structured as \"`project_id`,`region`,`instance_id`,`user_id`\".",
- "user_id": "User ID.",
- "instance_id": "ID of the SQLServer Flex instance.",
- "project_id": "STACKIT project ID to which the instance is associated.",
- "username": "Username of the SQLServer Flex instance.",
- "roles": "Database access levels for the user. The values for the default roles are: `##STACKIT_DatabaseManager##`, `##STACKIT_LoginManager##`, `##STACKIT_ProcessManager##`, `##STACKIT_ServerManager##`, `##STACKIT_SQLAgentManager##`, `##STACKIT_SQLAgentUser##`",
- "password": "Password of the user account.",
- "status": "Status of the user.",
- "default_database": "Default database of the user.",
+func (r *userResource) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) {
+ s := sqlserverflexalphagen.UserResourceSchema(ctx)
+
+ fields, err := utils.ReadModifiersConfig(modifiersFileByte)
+ if err != nil {
+ resp.Diagnostics.AddError("error during read modifiers config file", err.Error())
+ return
}
- resp.Schema = schema.Schema{
- Description: descriptions["main"],
- Attributes: map[string]schema.Attribute{
- "id": schema.StringAttribute{
- Description: descriptions["id"],
- Computed: true,
- PlanModifiers: []planmodifier.String{
- stringplanmodifier.UseStateForUnknown(),
- },
- },
- "user_id": schema.Int64Attribute{
- Description: descriptions["user_id"],
- Computed: true,
- PlanModifiers: []planmodifier.Int64{
- int64planmodifier.UseStateForUnknown(),
- },
- Validators: []validator.Int64{},
- },
- "instance_id": schema.StringAttribute{
- Description: descriptions["instance_id"],
- Required: true,
- PlanModifiers: []planmodifier.String{
- stringplanmodifier.RequiresReplace(),
- stringplanmodifier.UseStateForUnknown(),
- },
- Validators: []validator.String{
- validate.UUID(),
- validate.NoSeparator(),
- },
- },
- "project_id": schema.StringAttribute{
- Description: descriptions["project_id"],
- Required: true,
- PlanModifiers: []planmodifier.String{
- stringplanmodifier.RequiresReplace(),
- stringplanmodifier.UseStateForUnknown(),
- },
- Validators: []validator.String{
- validate.UUID(),
- validate.NoSeparator(),
- },
- },
- "username": schema.StringAttribute{
- Description: descriptions["username"],
- Required: true,
- PlanModifiers: []planmodifier.String{
- stringplanmodifier.RequiresReplace(),
- stringplanmodifier.UseStateForUnknown(),
- },
- },
- "roles": schema.SetAttribute{
- Description: descriptions["roles"],
- ElementType: types.StringType,
- Required: true,
- PlanModifiers: []planmodifier.Set{
- setplanmodifier.RequiresReplace(),
- },
- },
- "password": schema.StringAttribute{
- Description: descriptions["password"],
- Computed: true,
- Sensitive: true,
- },
- "host": schema.StringAttribute{
- Computed: true,
- },
- "port": schema.Int64Attribute{
- Computed: true,
- },
- "region": schema.StringAttribute{
- Optional: true,
- // must be computed to allow for storing the override value from the provider
- Computed: true,
- Description: descriptions["region"],
- PlanModifiers: []planmodifier.String{
- stringplanmodifier.RequiresReplace(),
- },
- },
- "status": schema.StringAttribute{
- Computed: true,
- },
- "default_database": schema.StringAttribute{
- Computed: true,
- },
- },
+ err = utils.AddPlanModifiersToResourceSchema(fields, &s)
+ if err != nil {
+ resp.Diagnostics.AddError("error adding plan modifiers", err.Error())
+ return
}
+ resp.Schema = s
}
// Create creates the resource and sets the initial Terraform state.
@@ -229,7 +137,7 @@ func (r *userResource) Create(
req resource.CreateRequest,
resp *resource.CreateResponse,
) { // nolint:gocritic // function signature required by Terraform
- var model Model
+ var model resourceModel
diags := req.Plan.Get(ctx, &model)
resp.Diagnostics.Append(diags...)
if resp.Diagnostics.HasError() {
@@ -313,7 +221,7 @@ func (r *userResource) Read(
req resource.ReadRequest,
resp *resource.ReadResponse,
) { // nolint:gocritic // function signature required by Terraform
- var model Model
+ var model resourceModel
diags := req.State.Get(ctx, &model)
resp.Diagnostics.Append(diags...)
if resp.Diagnostics.HasError() {
@@ -387,7 +295,7 @@ func (r *userResource) Delete(
resp *resource.DeleteResponse,
) { // nolint:gocritic // function signature required by Terraform
// Retrieve values from plan
- var model Model
+ var model resourceModel
diags := req.State.Get(ctx, &model)
resp.Diagnostics.Append(diags...)
if resp.Diagnostics.HasError() {
@@ -429,23 +337,63 @@ func (r *userResource) ImportState(
req resource.ImportStateRequest,
resp *resource.ImportStateResponse,
) {
- idParts := strings.Split(req.ID, core.Separator)
- if len(idParts) != 4 || idParts[0] == "" || idParts[1] == "" || idParts[2] == "" || idParts[3] == "" {
- core.LogAndAddError(
- ctx, &resp.Diagnostics,
- "Error importing user",
- fmt.Sprintf(
- "Expected import identifier with format [project_id],[region],[instance_id],[user_id], got %q",
- req.ID,
- ),
- )
+
+ ctx = core.InitProviderContext(ctx)
+
+ if req.ID != "" {
+
+ idParts := strings.Split(req.ID, core.Separator)
+
+ if len(idParts) != 4 || idParts[0] == "" || idParts[1] == "" || idParts[2] == "" || idParts[3] == "" {
+ core.LogAndAddError(
+ ctx, &resp.Diagnostics,
+ "Error importing user",
+ fmt.Sprintf(
+ "Expected import identifier with format [project_id],[region],[instance_id],[user_id], got %q",
+ req.ID,
+ ),
+ )
+ return
+ }
+
+ userId, err := strconv.ParseInt(idParts[3], 10, 64)
+ if err != nil {
+ core.LogAndAddError(
+ ctx,
+ &resp.Diagnostics,
+ "Error importing user",
+ fmt.Sprintf("Invalid user_id format: %q. It must be a valid integer.", idParts[3]),
+ )
+ return
+ }
+
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), idParts[0])...)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), idParts[1])...)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), idParts[2])...)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("user_id"), userId)...)
+
+ tflog.Info(ctx, "SQLServer Flex user state imported")
+
return
}
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), idParts[0])...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), idParts[1])...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), idParts[2])...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("user_id"), idParts[3])...)
+ // If no ID is provided, attempt to read identity attributes from the import configuration
+ var identityData UserResourceIdentityModel
+ resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ projectId := identityData.ProjectID.ValueString()
+ region := identityData.Region.ValueString()
+ instanceId := identityData.InstanceID.ValueString()
+ userId := identityData.UserID.ValueInt64()
+
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), projectId)...)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), region)...)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), instanceId)...)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("user_id"), userId)...)
+
core.LogAndAddWarning(
ctx,
&resp.Diagnostics,
@@ -455,7 +403,7 @@ func (r *userResource) ImportState(
tflog.Info(ctx, "SQLServer Flex user state imported")
}
-func mapFieldsCreate(userResp *sqlserverflexalpha.CreateUserResponse, model *Model, region string) error {
+func mapFieldsCreate(userResp *sqlserverflexalpha.CreateUserResponse, model *resourceModel, region string) error {
if userResp == nil {
return fmt.Errorf("response is nil")
}
@@ -468,12 +416,6 @@ func mapFieldsCreate(userResp *sqlserverflexalpha.CreateUserResponse, model *Mod
return fmt.Errorf("user id not present")
}
userId := *user.Id
- model.Id = utils.BuildInternalTerraformId(
- model.ProjectId.ValueString(),
- region,
- model.InstanceId.ValueString(),
- strconv.FormatInt(userId, 10),
- )
model.UserId = types.Int64Value(userId)
model.Username = types.StringPointerValue(user.Username)
@@ -491,11 +433,11 @@ func mapFieldsCreate(userResp *sqlserverflexalpha.CreateUserResponse, model *Mod
if diags.HasError() {
return fmt.Errorf("failed to map roles: %w", core.DiagsToError(diags))
}
- model.Roles = rolesSet
+ model.Roles = types.List(rolesSet)
}
if model.Roles.IsNull() || model.Roles.IsUnknown() {
- model.Roles = types.SetNull(types.StringType)
+ model.Roles = types.List(types.SetNull(types.StringType))
}
model.Host = types.StringPointerValue(user.Host)
@@ -507,7 +449,7 @@ func mapFieldsCreate(userResp *sqlserverflexalpha.CreateUserResponse, model *Mod
return nil
}
-func mapFields(userResp *sqlserverflexalpha.GetUserResponse, model *Model, region string) error {
+func mapFields(userResp *sqlserverflexalpha.GetUserResponse, model *resourceModel, region string) error {
if userResp == nil {
return fmt.Errorf("response is nil")
}
@@ -524,12 +466,7 @@ func mapFields(userResp *sqlserverflexalpha.GetUserResponse, model *Model, regio
} else {
return fmt.Errorf("user id not present")
}
- model.Id = utils.BuildInternalTerraformId(
- model.ProjectId.ValueString(),
- region,
- model.InstanceId.ValueString(),
- strconv.FormatInt(userId, 10),
- )
+
model.UserId = types.Int64Value(userId)
model.Username = types.StringPointerValue(user.Username)
@@ -542,11 +479,11 @@ func mapFields(userResp *sqlserverflexalpha.GetUserResponse, model *Model, regio
if diags.HasError() {
return fmt.Errorf("failed to map roles: %w", core.DiagsToError(diags))
}
- model.Roles = rolesSet
+ model.Roles = types.List(rolesSet)
}
if model.Roles.IsNull() || model.Roles.IsUnknown() {
- model.Roles = types.SetNull(types.StringType)
+ model.Roles = types.List(types.SetNull(types.StringType))
}
model.Host = types.StringPointerValue(user.Host)
@@ -556,7 +493,7 @@ func mapFields(userResp *sqlserverflexalpha.GetUserResponse, model *Model, regio
}
func toCreatePayload(
- model *Model,
+ model *resourceModel,
roles []sqlserverflexalpha.UserRole,
) (*sqlserverflexalpha.CreateUserRequestPayload, error) {
if model == nil {
diff --git a/stackit/internal/services/sqlserverflexalpha/user/resource_test.go b/stackit/internal/services/sqlserverflexalpha/user/resource_test.go
index ad6bbf5a..e7ddddb1 100644
--- a/stackit/internal/services/sqlserverflexalpha/user/resource_test.go
+++ b/stackit/internal/services/sqlserverflexalpha/user/resource_test.go
@@ -16,7 +16,7 @@ func TestMapFieldsCreate(t *testing.T) {
description string
input *sqlserverflexalpha.CreateUserResponse
region string
- expected Model
+ expected resourceModel
isValid bool
}{
{
@@ -26,13 +26,13 @@ func TestMapFieldsCreate(t *testing.T) {
Password: utils.Ptr(""),
},
testRegion,
- Model{
- Id: types.StringValue("pid,region,iid,1"),
+ resourceModel{
+ Id: types.Int64Value(1),
UserId: types.Int64Value(1),
InstanceId: types.StringValue("iid"),
ProjectId: types.StringValue("pid"),
Username: types.StringNull(),
- Roles: types.SetNull(types.StringType),
+ Roles: types.List(types.SetNull(types.StringType)),
Password: types.StringValue(""),
Host: types.StringNull(),
Port: types.Int64Null(),
@@ -57,18 +57,20 @@ func TestMapFieldsCreate(t *testing.T) {
DefaultDatabase: utils.Ptr("default_db"),
},
testRegion,
- Model{
- Id: types.StringValue("pid,region,iid,2"),
+ resourceModel{
+ Id: types.Int64Value(2),
UserId: types.Int64Value(2),
InstanceId: types.StringValue("iid"),
ProjectId: types.StringValue("pid"),
Username: types.StringValue("username"),
- Roles: types.SetValueMust(
- types.StringType, []attr.Value{
- types.StringValue("role_1"),
- types.StringValue("role_2"),
- types.StringValue(""),
- },
+ Roles: types.List(
+ types.SetValueMust(
+ types.StringType, []attr.Value{
+ types.StringValue("role_1"),
+ types.StringValue("role_2"),
+ types.StringValue(""),
+ },
+ ),
),
Password: types.StringValue("password"),
Host: types.StringValue("host"),
@@ -90,13 +92,13 @@ func TestMapFieldsCreate(t *testing.T) {
Port: utils.Ptr(int64(2123456789)),
},
testRegion,
- Model{
- Id: types.StringValue("pid,region,iid,3"),
+ resourceModel{
+ Id: types.Int64Value(3),
UserId: types.Int64Value(3),
InstanceId: types.StringValue("iid"),
ProjectId: types.StringValue("pid"),
Username: types.StringNull(),
- Roles: types.SetValueMust(types.StringType, []attr.Value{}),
+ Roles: types.List(types.SetValueMust(types.StringType, []attr.Value{})),
Password: types.StringValue(""),
Host: types.StringNull(),
Port: types.Int64Value(2123456789),
@@ -110,21 +112,21 @@ func TestMapFieldsCreate(t *testing.T) {
"nil_response",
nil,
testRegion,
- Model{},
+ resourceModel{},
false,
},
{
"nil_response_2",
&sqlserverflexalpha.CreateUserResponse{},
testRegion,
- Model{},
+ resourceModel{},
false,
},
{
"no_resource_id",
&sqlserverflexalpha.CreateUserResponse{},
testRegion,
- Model{},
+ resourceModel{},
false,
},
{
@@ -133,14 +135,14 @@ func TestMapFieldsCreate(t *testing.T) {
Id: utils.Ptr(int64(1)),
},
testRegion,
- Model{},
+ resourceModel{},
false,
},
}
for _, tt := range tests {
t.Run(
tt.description, func(t *testing.T) {
- state := &Model{
+ state := &resourceModel{
ProjectId: tt.expected.ProjectId,
InstanceId: tt.expected.InstanceId,
}
@@ -168,20 +170,20 @@ func TestMapFields(t *testing.T) {
description string
input *sqlserverflexalpha.GetUserResponse
region string
- expected Model
+ expected resourceModel
isValid bool
}{
{
"default_values",
&sqlserverflexalpha.GetUserResponse{},
testRegion,
- Model{
- Id: types.StringValue("pid,region,iid,1"),
+ resourceModel{
+ Id: types.Int64Value(1),
UserId: types.Int64Value(1),
InstanceId: types.StringValue("iid"),
ProjectId: types.StringValue("pid"),
Username: types.StringNull(),
- Roles: types.SetNull(types.StringType),
+ Roles: types.List(types.SetNull(types.StringType)),
Host: types.StringNull(),
Port: types.Int64Null(),
Region: types.StringValue(testRegion),
@@ -201,18 +203,20 @@ func TestMapFields(t *testing.T) {
Port: utils.Ptr(int64(1234)),
},
testRegion,
- Model{
- Id: types.StringValue("pid,region,iid,2"),
+ resourceModel{
+ Id: types.Int64Value(2),
UserId: types.Int64Value(2),
InstanceId: types.StringValue("iid"),
ProjectId: types.StringValue("pid"),
Username: types.StringValue("username"),
- Roles: types.SetValueMust(
- types.StringType, []attr.Value{
- types.StringValue("role_1"),
- types.StringValue("role_2"),
- types.StringValue(""),
- },
+ Roles: types.List(
+ types.SetValueMust(
+ types.StringType, []attr.Value{
+ types.StringValue("role_1"),
+ types.StringValue("role_2"),
+ types.StringValue(""),
+ },
+ ),
),
Host: types.StringValue("host"),
Port: types.Int64Value(1234),
@@ -230,13 +234,13 @@ func TestMapFields(t *testing.T) {
Port: utils.Ptr(int64(2123456789)),
},
testRegion,
- Model{
- Id: types.StringValue("pid,region,iid,1"),
+ resourceModel{
+ Id: types.Int64Value(1),
UserId: types.Int64Value(1),
InstanceId: types.StringValue("iid"),
ProjectId: types.StringValue("pid"),
Username: types.StringNull(),
- Roles: types.SetValueMust(types.StringType, []attr.Value{}),
+ Roles: types.List(types.SetValueMust(types.StringType, []attr.Value{})),
Host: types.StringNull(),
Port: types.Int64Value(2123456789),
Region: types.StringValue(testRegion),
@@ -247,28 +251,28 @@ func TestMapFields(t *testing.T) {
"nil_response",
nil,
testRegion,
- Model{},
+ resourceModel{},
false,
},
{
"nil_response_2",
&sqlserverflexalpha.GetUserResponse{},
testRegion,
- Model{},
+ resourceModel{},
false,
},
{
"no_resource_id",
&sqlserverflexalpha.GetUserResponse{},
testRegion,
- Model{},
+ resourceModel{},
false,
},
}
for _, tt := range tests {
t.Run(
tt.description, func(t *testing.T) {
- state := &Model{
+ state := &resourceModel{
ProjectId: tt.expected.ProjectId,
InstanceId: tt.expected.InstanceId,
UserId: tt.expected.UserId,
@@ -294,14 +298,14 @@ func TestMapFields(t *testing.T) {
func TestToCreatePayload(t *testing.T) {
tests := []struct {
description string
- input *Model
+ input *resourceModel
inputRoles []sqlserverflexalpha.UserRole
expected *sqlserverflexalpha.CreateUserRequestPayload
isValid bool
}{
{
"default_values",
- &Model{},
+ &resourceModel{},
[]sqlserverflexalpha.UserRole{},
&sqlserverflexalpha.CreateUserRequestPayload{
Roles: &[]sqlserverflexalpha.UserRole{},
@@ -311,7 +315,7 @@ func TestToCreatePayload(t *testing.T) {
},
{
"default_values",
- &Model{
+ &resourceModel{
Username: types.StringValue("username"),
},
[]sqlserverflexalpha.UserRole{
@@ -329,7 +333,7 @@ func TestToCreatePayload(t *testing.T) {
},
{
"null_fields_and_int_conversions",
- &Model{
+ &resourceModel{
Username: types.StringNull(),
},
[]sqlserverflexalpha.UserRole{
@@ -352,7 +356,7 @@ func TestToCreatePayload(t *testing.T) {
},
{
"nil_roles",
- &Model{
+ &resourceModel{
Username: types.StringValue("username"),
},
[]sqlserverflexalpha.UserRole{},
diff --git a/stackit/internal/services/sqlserverflexbeta/database/datasource.go b/stackit/internal/services/sqlserverflexbeta/database/datasource.go
index bb6c3038..063fe6d9 100644
--- a/stackit/internal/services/sqlserverflexbeta/database/datasource.go
+++ b/stackit/internal/services/sqlserverflexbeta/database/datasource.go
@@ -26,17 +26,21 @@ func NewDatabaseDataSource() datasource.DataSource {
return &databaseDataSource{}
}
+type dataSourceModel struct {
+ sqlserverflexbetaGen.DatabaseModel
+ TfId types.String `tfsdk:"id"`
+}
+
type databaseDataSource struct {
client *sqlserverflexbetaPkg.APIClient
providerData core.ProviderData
}
-type dsModel struct {
- sqlserverflexbetaGen.DatabaseModel
- TfId types.String `tfsdk:"id"`
-}
-
-func (d *databaseDataSource) Metadata(_ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
+func (d *databaseDataSource) Metadata(
+ _ context.Context,
+ req datasource.MetadataRequest,
+ resp *datasource.MetadataResponse,
+) {
resp.TypeName = req.ProviderTypeName + "_sqlserverflexbeta_database"
}
@@ -92,7 +96,7 @@ func (d *databaseDataSource) Configure(
}
func (d *databaseDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
- var data dsModel
+ var data dataSourceModel
readErr := "Read DB error"
// Read Terraform configuration data into the model
diff --git a/stackit/internal/services/sqlserverflexbeta/database/datasources_gen/database_data_source_fix.go b/stackit/internal/services/sqlserverflexbeta/database/datasources_gen/database_data_source_gen.go
similarity index 100%
rename from stackit/internal/services/sqlserverflexbeta/database/datasources_gen/database_data_source_fix.go
rename to stackit/internal/services/sqlserverflexbeta/database/datasources_gen/database_data_source_gen.go
diff --git a/stackit/internal/services/sqlserverflexbeta/database/planModifiers.yaml b/stackit/internal/services/sqlserverflexbeta/database/planModifiers.yaml
new file mode 100644
index 00000000..d6209230
--- /dev/null
+++ b/stackit/internal/services/sqlserverflexbeta/database/planModifiers.yaml
@@ -0,0 +1,50 @@
+fields:
+ - name: 'id'
+ modifiers:
+ - 'UseStateForUnknown'
+
+ - name: 'instance_id'
+ validators:
+ - validate.NoSeparator
+ - validate.UUID
+ modifiers:
+ - 'RequiresReplace'
+
+ - name: 'project_id'
+ validators:
+ - validate.NoSeparator
+ - validate.UUID
+ modifiers:
+ - 'RequiresReplace'
+
+ - name: 'region'
+ modifiers:
+ - 'RequiresReplace'
+
+ - name: 'name'
+ modifiers:
+ - 'RequiresReplace'
+
+ - name: 'collation'
+ modifiers:
+ - 'RequiresReplace'
+
+ - name: 'owner'
+ modifiers:
+ - 'RequiresReplace'
+
+ - name: 'database_name'
+ modifiers:
+ - 'UseStateForUnknown'
+
+ - name: 'collation_name'
+ modifiers:
+ - 'UseStateForUnknown'
+
+ - name: 'compatibility'
+ modifiers:
+ - 'RequiresReplace'
+
+ - name: 'compatibility_level'
+ modifiers:
+ - 'UseStateForUnknown'
diff --git a/stackit/internal/services/sqlserverflexbeta/database/resource.go b/stackit/internal/services/sqlserverflexbeta/database/resource.go
index 5ae1d6c4..9c052dbb 100644
--- a/stackit/internal/services/sqlserverflexbeta/database/resource.go
+++ b/stackit/internal/services/sqlserverflexbeta/database/resource.go
@@ -2,6 +2,7 @@ package sqlserverflexbeta
import (
"context"
+ _ "embed"
"fmt"
"strings"
"time"
@@ -35,6 +36,9 @@ func NewDatabaseResource() resource.Resource {
return &databaseResource{}
}
+// resourceModel describes the resource data model.
+type resourceModel = sqlserverflexbetaResGen.DatabaseModel
+
type databaseResource struct {
client *sqlserverflexbeta.APIClient
providerData core.ProviderData
@@ -47,15 +51,40 @@ type DatabaseResourceIdentityModel struct {
DatabaseName types.String `tfsdk:"database_name"`
}
-func (r *databaseResource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
+func (r *databaseResource) Metadata(
+ ctx context.Context,
+ req resource.MetadataRequest,
+ resp *resource.MetadataResponse,
+) {
resp.TypeName = req.ProviderTypeName + "_sqlserverflexbeta_database"
}
+//go:embed planModifiers.yaml
+var modifiersFileByte []byte
+
func (r *databaseResource) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) {
- resp.Schema = sqlserverflexbetaResGen.DatabaseResourceSchema(ctx)
+
+ s := sqlserverflexbetaResGen.DatabaseResourceSchema(ctx)
+
+ fields, err := utils.ReadModifiersConfig(modifiersFileByte)
+ if err != nil {
+ resp.Diagnostics.AddError("error during read modifiers config file", err.Error())
+ return
+ }
+
+ err = utils.AddPlanModifiersToResourceSchema(fields, &s)
+ if err != nil {
+ resp.Diagnostics.AddError("error adding plan modifiers", err.Error())
+ return
+ }
+ resp.Schema = s
}
-func (r *databaseResource) IdentitySchema(_ context.Context, _ resource.IdentitySchemaRequest, resp *resource.IdentitySchemaResponse) {
+func (r *databaseResource) IdentitySchema(
+ _ context.Context,
+ _ resource.IdentitySchemaRequest,
+ resp *resource.IdentitySchemaResponse,
+) {
resp.IdentitySchema = identityschema.Schema{
Attributes: map[string]identityschema.Attribute{
"project_id": identityschema.StringAttribute{
@@ -91,7 +120,10 @@ func (r *databaseResource) Configure(
utils.UserAgentConfigOption(r.providerData.Version),
}
if r.providerData.SQLServerFlexCustomEndpoint != "" {
- apiClientConfigOptions = append(apiClientConfigOptions, config.WithEndpoint(r.providerData.SQLServerFlexCustomEndpoint))
+ apiClientConfigOptions = append(
+ apiClientConfigOptions,
+ config.WithEndpoint(r.providerData.SQLServerFlexCustomEndpoint),
+ )
} else {
apiClientConfigOptions = append(apiClientConfigOptions, config.WithRegion(r.providerData.GetRegion()))
}
@@ -111,7 +143,7 @@ func (r *databaseResource) Configure(
}
func (r *databaseResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) {
- var data sqlserverflexbetaResGen.DatabaseModel
+ var data resourceModel
createErr := "DB create error"
// Read Terraform plan data into the model
@@ -243,7 +275,7 @@ func (r *databaseResource) Create(ctx context.Context, req resource.CreateReques
}
func (r *databaseResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) {
- var data sqlserverflexbetaResGen.DatabaseModel
+ var data resourceModel
readErr := "[Database Read]"
// Read Terraform prior state data into the model
@@ -298,7 +330,7 @@ func (r *databaseResource) Read(ctx context.Context, req resource.ReadRequest, r
}
func (r *databaseResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) {
- var data sqlserverflexbetaResGen.DatabaseModel
+ var data resourceModel
// Read Terraform prior state data into the model
resp.Diagnostics.Append(req.State.Get(ctx, &data)...)
@@ -329,7 +361,7 @@ func (r *databaseResource) Update(ctx context.Context, req resource.UpdateReques
}
func (r *databaseResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) {
- var data sqlserverflexbetaResGen.DatabaseModel
+ var data resourceModel
// Read Terraform prior state data into the model
resp.Diagnostics.Append(req.State.Get(ctx, &data)...)
@@ -422,9 +454,13 @@ func (r *databaseResource) ImportState(
idParts := strings.Split(req.ID, core.Separator)
if len(idParts) != 4 || idParts[0] == "" || idParts[1] == "" || idParts[2] == "" || idParts[3] == "" {
- core.LogAndAddError(ctx, &resp.Diagnostics,
+ core.LogAndAddError(
+ ctx, &resp.Diagnostics,
"Error importing database",
- fmt.Sprintf("Expected import identifier with format: [project_id],[region],[instance_id],[database_name] Got: %q", req.ID),
+ fmt.Sprintf(
+ "Expected import identifier with format: [project_id],[region],[instance_id],[database_name] Got: %q",
+ req.ID,
+ ),
)
return
}
@@ -449,21 +485,22 @@ func (r *databaseResource) ImportState(
return
}
+ // If no ID is provided, attempt to read identity attributes from the import configuration
var identityData DatabaseResourceIdentityModel
resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...)
if resp.Diagnostics.HasError() {
return
}
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), identityData.ProjectID.ValueString())...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), identityData.Region.ValueString())...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), identityData.InstanceID.ValueString())...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("database_name"), identityData.DatabaseName.ValueString())...)
+ projectId := identityData.ProjectID.ValueString()
+ region := identityData.Region.ValueString()
+ instanceId := identityData.InstanceID.ValueString()
+ databaseName := identityData.DatabaseName.ValueString()
- resp.Diagnostics.Append(resp.Identity.Set(ctx, &identityData)...)
- if resp.Diagnostics.HasError() {
- return
- }
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), projectId)...)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), region)...)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), instanceId)...)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("database_name"), databaseName)...)
tflog.Info(ctx, "Sqlserverflexbeta database state imported")
}
diff --git a/stackit/internal/services/sqlserverflexbeta/flavors/datasource.go b/stackit/internal/services/sqlserverflexbeta/flavors/datasource.go
new file mode 100644
index 00000000..b401e4ff
--- /dev/null
+++ b/stackit/internal/services/sqlserverflexbeta/flavors/datasource.go
@@ -0,0 +1,155 @@
+package sqlserverflexbeta
+
+import (
+ "context"
+ "fmt"
+ "net/http"
+
+ "github.com/hashicorp/terraform-plugin-framework/datasource"
+ "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+ "github.com/hashicorp/terraform-plugin-log/tflog"
+ "github.com/stackitcloud/stackit-sdk-go/core/config"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
+
+ sqlserverflexbetaPkg "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta"
+
+ sqlserverflexbetaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/flavors/datasources_gen"
+)
+
+var _ datasource.DataSource = (*flavorsDataSource)(nil)
+
+const errorPrefix = "[Sqlserverflexbeta - Flavors]"
+
+func NewFlavorsDataSource() datasource.DataSource {
+ return &flavorsDataSource{}
+}
+
+type dataSourceModel struct {
+ sqlserverflexbetaGen.FlavorsModel
+ TerraformId types.String `tfsdk:"id"`
+}
+
+type flavorsDataSource struct {
+ client *sqlserverflexbetaPkg.APIClient
+ providerData core.ProviderData
+}
+
+func (d *flavorsDataSource) Metadata(
+ _ context.Context,
+ req datasource.MetadataRequest,
+ resp *datasource.MetadataResponse,
+) {
+ resp.TypeName = req.ProviderTypeName + "_sqlserverflexbeta_flavors"
+}
+
+func (d *flavorsDataSource) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) {
+ resp.Schema = sqlserverflexbetaGen.FlavorsDataSourceSchema(ctx)
+ resp.Schema.Attributes["id"] = schema.StringAttribute{
+ Computed: true,
+ Description: "The terraform internal identifier.",
+ MarkdownDescription: "The terraform internal identifier.",
+ }
+}
+
+// Configure adds the provider configured client to the data source.
+func (d *flavorsDataSource) Configure(
+ ctx context.Context,
+ req datasource.ConfigureRequest,
+ resp *datasource.ConfigureResponse,
+) {
+ var ok bool
+ d.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics)
+ if !ok {
+ return
+ }
+
+ apiClientConfigOptions := []config.ConfigurationOption{
+ config.WithCustomAuth(d.providerData.RoundTripper),
+ utils.UserAgentConfigOption(d.providerData.Version),
+ }
+ if d.providerData.SQLServerFlexCustomEndpoint != "" {
+ apiClientConfigOptions = append(
+ apiClientConfigOptions,
+ config.WithEndpoint(d.providerData.SQLServerFlexCustomEndpoint),
+ )
+ } else {
+ apiClientConfigOptions = append(
+ apiClientConfigOptions,
+ config.WithRegion(d.providerData.GetRegion()),
+ )
+ }
+ apiClient, err := sqlserverflexbetaPkg.NewAPIClient(apiClientConfigOptions...)
+ if err != nil {
+ resp.Diagnostics.AddError(
+ "Error configuring API client",
+ fmt.Sprintf(
+ "Configuring client: %v. This is an error related to the provider configuration, not to the resource configuration",
+ err,
+ ),
+ )
+ return
+ }
+ d.client = apiClient
+ tflog.Info(ctx, fmt.Sprintf("%s client configured", errorPrefix))
+}
+
+func (d *flavorsDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
+ var data dataSourceModel
+
+ // Read Terraform configuration data into the model
+ resp.Diagnostics.Append(req.Config.Get(ctx, &data)...)
+
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ ctx = core.InitProviderContext(ctx)
+
+ projectId := data.ProjectId.ValueString()
+ region := d.providerData.GetRegionWithOverride(data.Region)
+ // TODO: implement right identifier for flavors
+ flavorsId := data.FlavorsModel.Flavors
+
+ ctx = tflog.SetField(ctx, "project_id", projectId)
+ ctx = tflog.SetField(ctx, "region", region)
+
+ // TODO: implement needed fields
+ ctx = tflog.SetField(ctx, "flavors_id", flavorsId)
+
+ // TODO: refactor to correct implementation
+ _, err := d.client.GetFlavorsRequest(ctx, projectId, region).Execute()
+ if err != nil {
+ utils.LogError(
+ ctx,
+ &resp.Diagnostics,
+ err,
+ "Reading flavors",
+ fmt.Sprintf("flavors with ID %q does not exist in project %q.", flavorsId, projectId),
+ map[int]string{
+ http.StatusForbidden: fmt.Sprintf("Project with ID %q not found or forbidden access", projectId),
+ },
+ )
+ resp.State.RemoveResource(ctx)
+ return
+ }
+
+ ctx = core.LogResponse(ctx)
+
+ // TODO: refactor to correct implementation of internal tf id
+ data.TerraformId = utils.BuildInternalTerraformId(projectId, region)
+
+ // TODO: fill remaining fields
+ // data.Flavors = types.Sometype(apiResponse.GetFlavors())
+ // data.Page = types.Sometype(apiResponse.GetPage())
+ // data.Pagination = types.Sometype(apiResponse.GetPagination())
+ // data.ProjectId = types.Sometype(apiResponse.GetProjectId())
+ // data.Region = types.Sometype(apiResponse.GetRegion())
+ // data.Size = types.Sometype(apiResponse.GetSize())
+ // data.Sort = types.Sometype(apiResponse.GetSort()) // Save data into Terraform state
+ resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
+
+ tflog.Info(ctx, fmt.Sprintf("%s read successful", errorPrefix))
+}
diff --git a/stackit/internal/services/sqlserverflexbeta/flavors/datasources_gen/flavors_data_source_gen.go b/stackit/internal/services/sqlserverflexbeta/flavors/datasources_gen/flavors_data_source_gen.go
new file mode 100644
index 00000000..a9d35ba1
--- /dev/null
+++ b/stackit/internal/services/sqlserverflexbeta/flavors/datasources_gen/flavors_data_source_gen.go
@@ -0,0 +1,1909 @@
+// Code generated by terraform-plugin-framework-generator DO NOT EDIT.
+
+package sqlserverflexbeta
+
+import (
+ "context"
+ "fmt"
+ "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ "github.com/hashicorp/terraform-plugin-framework/attr"
+ "github.com/hashicorp/terraform-plugin-framework/diag"
+ "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+ "github.com/hashicorp/terraform-plugin-framework/types/basetypes"
+ "github.com/hashicorp/terraform-plugin-go/tftypes"
+ "strings"
+
+ "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
+)
+
+// FlavorsDataSourceSchema returns the Terraform schema for the SQLServer Flex
+// flavors data source: a computed list of flavors plus paging inputs
+// (page/size/sort) and the required project_id/region selectors.
+// NOTE(review): the nested attribute below is named "tf_original_api_id", but
+// the generated FlavorsValue type in this file still uses the key "id"
+// (tfsdk tag and AttributeTypes). These must agree or the provider will fail
+// at runtime with "id is missing from object" — confirm the tag-rewrite step
+// also updates the value type, or regenerate.
+func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
+ return schema.Schema{
+ Attributes: map[string]schema.Attribute{
+ "flavors": schema.ListNestedAttribute{
+ NestedObject: schema.NestedAttributeObject{
+ Attributes: map[string]schema.Attribute{
+ "cpu": schema.Int64Attribute{
+ Computed: true,
+ Description: "The cpu count of the instance.",
+ MarkdownDescription: "The cpu count of the instance.",
+ },
+ "description": schema.StringAttribute{
+ Computed: true,
+ Description: "The flavor description.",
+ MarkdownDescription: "The flavor description.",
+ },
+ "tf_original_api_id": schema.StringAttribute{
+ Computed: true,
+ Description: "The id of the instance flavor.",
+ MarkdownDescription: "The id of the instance flavor.",
+ },
+ "max_gb": schema.Int64Attribute{
+ Computed: true,
+ Description: "maximum storage which can be ordered for the flavor in Gigabyte.",
+ MarkdownDescription: "maximum storage which can be ordered for the flavor in Gigabyte.",
+ },
+ "memory": schema.Int64Attribute{
+ Computed: true,
+ Description: "The memory of the instance in Gibibyte.",
+ MarkdownDescription: "The memory of the instance in Gibibyte.",
+ },
+ "min_gb": schema.Int64Attribute{
+ Computed: true,
+ Description: "minimum storage which is required to order in Gigabyte.",
+ MarkdownDescription: "minimum storage which is required to order in Gigabyte.",
+ },
+ "node_type": schema.StringAttribute{
+ Computed: true,
+ Description: "defines the nodeType it can be either single or HA",
+ MarkdownDescription: "defines the nodeType it can be either single or HA",
+ },
+ "storage_classes": schema.ListNestedAttribute{
+ NestedObject: schema.NestedAttributeObject{
+ Attributes: map[string]schema.Attribute{
+ "class": schema.StringAttribute{
+ Computed: true,
+ },
+ "max_io_per_sec": schema.Int64Attribute{
+ Computed: true,
+ },
+ "max_through_in_mb": schema.Int64Attribute{
+ Computed: true,
+ },
+ },
+ CustomType: StorageClassesType{
+ ObjectType: types.ObjectType{
+ AttrTypes: StorageClassesValue{}.AttributeTypes(ctx),
+ },
+ },
+ },
+ Computed: true,
+ Description: "maximum storage which can be ordered for the flavor in Gigabyte.",
+ MarkdownDescription: "maximum storage which can be ordered for the flavor in Gigabyte.",
+ },
+ },
+ CustomType: FlavorsType{
+ ObjectType: types.ObjectType{
+ AttrTypes: FlavorsValue{}.AttributeTypes(ctx),
+ },
+ },
+ },
+ Computed: true,
+ Description: "List of flavors available for the project.",
+ MarkdownDescription: "List of flavors available for the project.",
+ },
+ "page": schema.Int64Attribute{
+ Optional: true,
+ Computed: true,
+ Description: "Number of the page of items list to be returned.",
+ MarkdownDescription: "Number of the page of items list to be returned.",
+ },
+ "pagination": schema.SingleNestedAttribute{
+ Attributes: map[string]schema.Attribute{
+ "page": schema.Int64Attribute{
+ Computed: true,
+ },
+ "size": schema.Int64Attribute{
+ Computed: true,
+ },
+ "sort": schema.StringAttribute{
+ Computed: true,
+ },
+ "total_pages": schema.Int64Attribute{
+ Computed: true,
+ },
+ "total_rows": schema.Int64Attribute{
+ Computed: true,
+ },
+ },
+ CustomType: PaginationType{
+ ObjectType: types.ObjectType{
+ AttrTypes: PaginationValue{}.AttributeTypes(ctx),
+ },
+ },
+ Computed: true,
+ },
+ "project_id": schema.StringAttribute{
+ Required: true,
+ Description: "The STACKIT project ID.",
+ MarkdownDescription: "The STACKIT project ID.",
+ },
+ "region": schema.StringAttribute{
+ Required: true,
+ Description: "The region which should be addressed",
+ MarkdownDescription: "The region which should be addressed",
+ Validators: []validator.String{
+ stringvalidator.OneOf(
+ "eu01",
+ ),
+ },
+ },
+ "size": schema.Int64Attribute{
+ Optional: true,
+ Computed: true,
+ Description: "Number of items to be returned on each page.",
+ MarkdownDescription: "Number of items to be returned on each page.",
+ },
+ "sort": schema.StringAttribute{
+ Optional: true,
+ Computed: true,
+ Description: "Sorting of the flavors to be returned on each page.",
+ MarkdownDescription: "Sorting of the flavors to be returned on each page.",
+ Validators: []validator.String{
+ stringvalidator.OneOf(
+ "index.desc",
+ "index.asc",
+ "cpu.desc",
+ "cpu.asc",
+ "flavor_description.asc",
+ "flavor_description.desc",
+ "id.desc",
+ "id.asc",
+ "size_max.desc",
+ "size_max.asc",
+ "ram.desc",
+ "ram.asc",
+ "size_min.desc",
+ "size_min.asc",
+ "storage_class.asc",
+ "storage_class.desc",
+ "node_type.asc",
+ "node_type.desc",
+ ),
+ },
+ },
+ },
+ }
+}
+
+// FlavorsModel is the top-level Terraform state model for the flavors data
+// source. Field names map 1:1 to the schema attributes via the tfsdk tags.
+type FlavorsModel struct {
+ Flavors types.List `tfsdk:"flavors"`
+ Page types.Int64 `tfsdk:"page"`
+ Pagination PaginationValue `tfsdk:"pagination"`
+ ProjectId types.String `tfsdk:"project_id"`
+ Region types.String `tfsdk:"region"`
+ Size types.Int64 `tfsdk:"size"`
+ Sort types.String `tfsdk:"sort"`
+}
+
+// Compile-time check that FlavorsType satisfies basetypes.ObjectTypable.
+var _ basetypes.ObjectTypable = FlavorsType{}
+
+// FlavorsType is the custom attr.Type for one element of the "flavors" list.
+type FlavorsType struct {
+ basetypes.ObjectType
+}
+
+// Equal reports whether o is also a FlavorsType with an equal embedded ObjectType.
+func (t FlavorsType) Equal(o attr.Type) bool {
+ if other, ok := o.(FlavorsType); ok {
+ return t.ObjectType.Equal(other.ObjectType)
+ }
+ return false
+}
+
+// String returns a fixed human-readable name for this type.
+func (t FlavorsType) String() string {
+ return "FlavorsType"
+}
+
+// ValueFromObject converts a generic ObjectValue into a FlavorsValue,
+// validating that every expected attribute is present and correctly typed.
+// Missing attributes abort immediately; wrong types are accumulated in diags
+// and checked together before the value is built.
+// NOTE(review): this reads the key "id", while the data-source schema exposes
+// "tf_original_api_id" for the same field — confirm the intended attribute
+// name before release.
+func (t FlavorsType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ attributes := in.Attributes()
+
+ cpuAttribute, ok := attributes["cpu"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `cpu is missing from object`)
+
+ return nil, diags
+ }
+
+ cpuVal, ok := cpuAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`cpu expected to be basetypes.Int64Value, was: %T`, cpuAttribute))
+ }
+
+ descriptionAttribute, ok := attributes["description"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `description is missing from object`)
+
+ return nil, diags
+ }
+
+ descriptionVal, ok := descriptionAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`description expected to be basetypes.StringValue, was: %T`, descriptionAttribute))
+ }
+
+ idAttribute, ok := attributes["id"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `id is missing from object`)
+
+ return nil, diags
+ }
+
+ idVal, ok := idAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`id expected to be basetypes.StringValue, was: %T`, idAttribute))
+ }
+
+ maxGbAttribute, ok := attributes["max_gb"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `max_gb is missing from object`)
+
+ return nil, diags
+ }
+
+ maxGbVal, ok := maxGbAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`max_gb expected to be basetypes.Int64Value, was: %T`, maxGbAttribute))
+ }
+
+ memoryAttribute, ok := attributes["memory"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `memory is missing from object`)
+
+ return nil, diags
+ }
+
+ memoryVal, ok := memoryAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`memory expected to be basetypes.Int64Value, was: %T`, memoryAttribute))
+ }
+
+ minGbAttribute, ok := attributes["min_gb"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `min_gb is missing from object`)
+
+ return nil, diags
+ }
+
+ minGbVal, ok := minGbAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`min_gb expected to be basetypes.Int64Value, was: %T`, minGbAttribute))
+ }
+
+ nodeTypeAttribute, ok := attributes["node_type"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `node_type is missing from object`)
+
+ return nil, diags
+ }
+
+ nodeTypeVal, ok := nodeTypeAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`node_type expected to be basetypes.StringValue, was: %T`, nodeTypeAttribute))
+ }
+
+ storageClassesAttribute, ok := attributes["storage_classes"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `storage_classes is missing from object`)
+
+ return nil, diags
+ }
+
+ storageClassesVal, ok := storageClassesAttribute.(basetypes.ListValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`storage_classes expected to be basetypes.ListValue, was: %T`, storageClassesAttribute))
+ }
+
+ if diags.HasError() {
+ return nil, diags
+ }
+
+ return FlavorsValue{
+ Cpu: cpuVal,
+ Description: descriptionVal,
+ Id: idVal,
+ MaxGb: maxGbVal,
+ Memory: memoryVal,
+ MinGb: minGbVal,
+ NodeType: nodeTypeVal,
+ StorageClasses: storageClassesVal,
+ state: attr.ValueStateKnown,
+ }, diags
+}
+
+// NewFlavorsValueNull returns a FlavorsValue in the null state.
+func NewFlavorsValueNull() FlavorsValue {
+ return FlavorsValue{state: attr.ValueStateNull}
+}
+
+// NewFlavorsValueUnknown returns a FlavorsValue in the unknown state.
+func NewFlavorsValueUnknown() FlavorsValue {
+ return FlavorsValue{state: attr.ValueStateUnknown}
+}
+
+// NewFlavorsValue builds a known FlavorsValue from the given attribute types
+// and values. It first cross-checks the two maps (missing, mismatched, or
+// extra entries produce diagnostics), then type-asserts each attribute.
+// On any error it returns an unknown value together with the diagnostics.
+// NOTE(review): expects the key "id", while the data-source schema exposes
+// "tf_original_api_id" — see the note on FlavorsDataSourceSchema.
+func NewFlavorsValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (FlavorsValue, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521
+ ctx := context.Background()
+
+ for name, attributeType := range attributeTypes {
+ attribute, ok := attributes[name]
+
+ if !ok {
+ diags.AddError(
+ "Missing FlavorsValue Attribute Value",
+ "While creating a FlavorsValue value, a missing attribute value was detected. "+
+ "A FlavorsValue must contain values for all attributes, even if null or unknown. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("FlavorsValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()),
+ )
+
+ continue
+ }
+
+ if !attributeType.Equal(attribute.Type(ctx)) {
+ diags.AddError(
+ "Invalid FlavorsValue Attribute Type",
+ "While creating a FlavorsValue value, an invalid attribute value was detected. "+
+ "A FlavorsValue must use a matching attribute type for the value. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("FlavorsValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+
+ fmt.Sprintf("FlavorsValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)),
+ )
+ }
+ }
+
+ for name := range attributes {
+ _, ok := attributeTypes[name]
+
+ if !ok {
+ diags.AddError(
+ "Extra FlavorsValue Attribute Value",
+ "While creating a FlavorsValue value, an extra attribute value was detected. "+
+ "A FlavorsValue must not contain values beyond the expected attribute types. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("Extra FlavorsValue Attribute Name: %s", name),
+ )
+ }
+ }
+
+ if diags.HasError() {
+ return NewFlavorsValueUnknown(), diags
+ }
+
+ cpuAttribute, ok := attributes["cpu"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `cpu is missing from object`)
+
+ return NewFlavorsValueUnknown(), diags
+ }
+
+ cpuVal, ok := cpuAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`cpu expected to be basetypes.Int64Value, was: %T`, cpuAttribute))
+ }
+
+ descriptionAttribute, ok := attributes["description"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `description is missing from object`)
+
+ return NewFlavorsValueUnknown(), diags
+ }
+
+ descriptionVal, ok := descriptionAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`description expected to be basetypes.StringValue, was: %T`, descriptionAttribute))
+ }
+
+ idAttribute, ok := attributes["id"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `id is missing from object`)
+
+ return NewFlavorsValueUnknown(), diags
+ }
+
+ idVal, ok := idAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`id expected to be basetypes.StringValue, was: %T`, idAttribute))
+ }
+
+ maxGbAttribute, ok := attributes["max_gb"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `max_gb is missing from object`)
+
+ return NewFlavorsValueUnknown(), diags
+ }
+
+ maxGbVal, ok := maxGbAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`max_gb expected to be basetypes.Int64Value, was: %T`, maxGbAttribute))
+ }
+
+ memoryAttribute, ok := attributes["memory"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `memory is missing from object`)
+
+ return NewFlavorsValueUnknown(), diags
+ }
+
+ memoryVal, ok := memoryAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`memory expected to be basetypes.Int64Value, was: %T`, memoryAttribute))
+ }
+
+ minGbAttribute, ok := attributes["min_gb"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `min_gb is missing from object`)
+
+ return NewFlavorsValueUnknown(), diags
+ }
+
+ minGbVal, ok := minGbAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`min_gb expected to be basetypes.Int64Value, was: %T`, minGbAttribute))
+ }
+
+ nodeTypeAttribute, ok := attributes["node_type"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `node_type is missing from object`)
+
+ return NewFlavorsValueUnknown(), diags
+ }
+
+ nodeTypeVal, ok := nodeTypeAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`node_type expected to be basetypes.StringValue, was: %T`, nodeTypeAttribute))
+ }
+
+ storageClassesAttribute, ok := attributes["storage_classes"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `storage_classes is missing from object`)
+
+ return NewFlavorsValueUnknown(), diags
+ }
+
+ storageClassesVal, ok := storageClassesAttribute.(basetypes.ListValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`storage_classes expected to be basetypes.ListValue, was: %T`, storageClassesAttribute))
+ }
+
+ if diags.HasError() {
+ return NewFlavorsValueUnknown(), diags
+ }
+
+ return FlavorsValue{
+ Cpu: cpuVal,
+ Description: descriptionVal,
+ Id: idVal,
+ MaxGb: maxGbVal,
+ Memory: memoryVal,
+ MinGb: minGbVal,
+ NodeType: nodeTypeVal,
+ StorageClasses: storageClassesVal,
+ state: attr.ValueStateKnown,
+ }, diags
+}
+
+// NewFlavorsValueMust is the panicking variant of NewFlavorsValue, intended
+// for use with statically known-good inputs.
+func NewFlavorsValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) FlavorsValue {
+ object, diags := NewFlavorsValue(attributeTypes, attributes)
+
+ if diags.HasError() {
+ // This could potentially be added to the diag package.
+ msgs := make([]string, 0, len(diags))
+ for _, d := range diags {
+ msgs = append(msgs, fmt.Sprintf("%s | %s | %s", d.Severity(), d.Summary(), d.Detail()))
+ }
+ panic("NewFlavorsValueMust received error(s): " + strings.Join(msgs, "\n"))
+ }
+
+ return object
+}
+
+// ValueFromTerraform converts a raw tftypes.Value into a FlavorsValue,
+// mapping nil type / unknown / null inputs to the corresponding value states
+// and otherwise converting each attribute via its declared attr.Type.
+func (t FlavorsType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) {
+ if in.Type() == nil {
+ return NewFlavorsValueNull(), nil
+ }
+
+ if !in.Type().Equal(t.TerraformType(ctx)) {
+ return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type())
+ }
+
+ if !in.IsKnown() {
+ return NewFlavorsValueUnknown(), nil
+ }
+
+ if in.IsNull() {
+ return NewFlavorsValueNull(), nil
+ }
+
+ attributes := map[string]attr.Value{}
+
+ val := map[string]tftypes.Value{}
+
+ err := in.As(&val)
+
+ if err != nil {
+ return nil, err
+ }
+
+ for k, v := range val {
+ a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v)
+
+ if err != nil {
+ return nil, err
+ }
+
+ attributes[k] = a
+ }
+
+ return NewFlavorsValueMust(FlavorsValue{}.AttributeTypes(ctx), attributes), nil
+}
+
+// ValueType returns the zero FlavorsValue this type produces.
+func (t FlavorsType) ValueType(ctx context.Context) attr.Value {
+ return FlavorsValue{}
+}
+
+// Compile-time check that FlavorsValue satisfies basetypes.ObjectValuable.
+var _ basetypes.ObjectValuable = FlavorsValue{}
+
+// FlavorsValue holds one element of the "flavors" list; state tracks
+// known/null/unknown.
+// NOTE(review): the tfsdk tag is "id", but the data-source schema exposes
+// "tf_original_api_id" for this field — confirm they are meant to differ.
+type FlavorsValue struct {
+ Cpu basetypes.Int64Value `tfsdk:"cpu"`
+ Description basetypes.StringValue `tfsdk:"description"`
+ Id basetypes.StringValue `tfsdk:"id"`
+ MaxGb basetypes.Int64Value `tfsdk:"max_gb"`
+ Memory basetypes.Int64Value `tfsdk:"memory"`
+ MinGb basetypes.Int64Value `tfsdk:"min_gb"`
+ NodeType basetypes.StringValue `tfsdk:"node_type"`
+ StorageClasses basetypes.ListValue `tfsdk:"storage_classes"`
+ state attr.ValueState
+}
+
+// ToTerraformValue converts the FlavorsValue into its tftypes.Value wire
+// representation. Known values are converted field by field; null and unknown
+// states map to the corresponding tftypes values.
+func (v FlavorsValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
+ attrTypes := make(map[string]tftypes.Type, 8)
+
+ var val tftypes.Value
+ var err error
+
+ attrTypes["cpu"] = basetypes.Int64Type{}.TerraformType(ctx)
+ attrTypes["description"] = basetypes.StringType{}.TerraformType(ctx)
+ attrTypes["id"] = basetypes.StringType{}.TerraformType(ctx)
+ attrTypes["max_gb"] = basetypes.Int64Type{}.TerraformType(ctx)
+ attrTypes["memory"] = basetypes.Int64Type{}.TerraformType(ctx)
+ attrTypes["min_gb"] = basetypes.Int64Type{}.TerraformType(ctx)
+ attrTypes["node_type"] = basetypes.StringType{}.TerraformType(ctx)
+ attrTypes["storage_classes"] = basetypes.ListType{
+ ElemType: StorageClassesValue{}.Type(ctx),
+ }.TerraformType(ctx)
+
+ objectType := tftypes.Object{AttributeTypes: attrTypes}
+
+ switch v.state {
+ case attr.ValueStateKnown:
+ vals := make(map[string]tftypes.Value, 8)
+
+ val, err = v.Cpu.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["cpu"] = val
+
+ val, err = v.Description.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["description"] = val
+
+ val, err = v.Id.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["id"] = val
+
+ val, err = v.MaxGb.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["max_gb"] = val
+
+ val, err = v.Memory.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["memory"] = val
+
+ val, err = v.MinGb.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["min_gb"] = val
+
+ val, err = v.NodeType.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["node_type"] = val
+
+ val, err = v.StorageClasses.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["storage_classes"] = val
+
+ if err := tftypes.ValidateValue(objectType, vals); err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ return tftypes.NewValue(objectType, vals), nil
+ case attr.ValueStateNull:
+ return tftypes.NewValue(objectType, nil), nil
+ case attr.ValueStateUnknown:
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), nil
+ default:
+ panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state))
+ }
+}
+
+// IsNull reports whether the value is in the null state.
+func (v FlavorsValue) IsNull() bool {
+ return v.state == attr.ValueStateNull
+}
+
+// IsUnknown reports whether the value is in the unknown state.
+func (v FlavorsValue) IsUnknown() bool {
+ return v.state == attr.ValueStateUnknown
+}
+
+// String returns a fixed human-readable name for this value type.
+func (v FlavorsValue) String() string {
+ return "FlavorsValue"
+}
+
+// ToObjectValue converts the FlavorsValue into a generic ObjectValue,
+// first normalizing the storage_classes list to carry the custom element
+// type (preserving its null/unknown state).
+func (v FlavorsValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ storageClasses := types.ListValueMust(
+ StorageClassesType{
+ basetypes.ObjectType{
+ AttrTypes: StorageClassesValue{}.AttributeTypes(ctx),
+ },
+ },
+ v.StorageClasses.Elements(),
+ )
+
+ if v.StorageClasses.IsNull() {
+ storageClasses = types.ListNull(
+ StorageClassesType{
+ basetypes.ObjectType{
+ AttrTypes: StorageClassesValue{}.AttributeTypes(ctx),
+ },
+ },
+ )
+ }
+
+ if v.StorageClasses.IsUnknown() {
+ storageClasses = types.ListUnknown(
+ StorageClassesType{
+ basetypes.ObjectType{
+ AttrTypes: StorageClassesValue{}.AttributeTypes(ctx),
+ },
+ },
+ )
+ }
+
+ attributeTypes := map[string]attr.Type{
+ "cpu": basetypes.Int64Type{},
+ "description": basetypes.StringType{},
+ "id": basetypes.StringType{},
+ "max_gb": basetypes.Int64Type{},
+ "memory": basetypes.Int64Type{},
+ "min_gb": basetypes.Int64Type{},
+ "node_type": basetypes.StringType{},
+ "storage_classes": basetypes.ListType{
+ ElemType: StorageClassesValue{}.Type(ctx),
+ },
+ }
+
+ if v.IsNull() {
+ return types.ObjectNull(attributeTypes), diags
+ }
+
+ if v.IsUnknown() {
+ return types.ObjectUnknown(attributeTypes), diags
+ }
+
+ objVal, diags := types.ObjectValue(
+ attributeTypes,
+ map[string]attr.Value{
+ "cpu": v.Cpu,
+ "description": v.Description,
+ "id": v.Id,
+ "max_gb": v.MaxGb,
+ "memory": v.Memory,
+ "min_gb": v.MinGb,
+ "node_type": v.NodeType,
+ "storage_classes": storageClasses,
+ })
+
+ return objVal, diags
+}
+
+// Equal reports whether o is a FlavorsValue with the same state and, when
+// both are known, equal attribute values.
+func (v FlavorsValue) Equal(o attr.Value) bool {
+ other, ok := o.(FlavorsValue)
+ if !ok || v.state != other.state {
+ return false
+ }
+ if v.state != attr.ValueStateKnown {
+ return true
+ }
+ return v.Cpu.Equal(other.Cpu) &&
+ v.Description.Equal(other.Description) &&
+ v.Id.Equal(other.Id) &&
+ v.MaxGb.Equal(other.MaxGb) &&
+ v.Memory.Equal(other.Memory) &&
+ v.MinGb.Equal(other.MinGb) &&
+ v.NodeType.Equal(other.NodeType) &&
+ v.StorageClasses.Equal(other.StorageClasses)
+}
+
+// Type returns the FlavorsType corresponding to this value.
+func (v FlavorsValue) Type(ctx context.Context) attr.Type {
+ return FlavorsType{
+ basetypes.ObjectType{
+ AttrTypes: v.AttributeTypes(ctx),
+ },
+ }
+}
+
+// AttributeTypes lists the attribute names and types of the flavors object.
+// NOTE(review): uses "id", while the data-source schema exposes
+// "tf_original_api_id" for the same field — confirm the intended name.
+func (v FlavorsValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
+ return map[string]attr.Type{
+ "cpu": basetypes.Int64Type{},
+ "description": basetypes.StringType{},
+ "id": basetypes.StringType{},
+ "max_gb": basetypes.Int64Type{},
+ "memory": basetypes.Int64Type{},
+ "min_gb": basetypes.Int64Type{},
+ "node_type": basetypes.StringType{},
+ "storage_classes": basetypes.ListType{
+ ElemType: StorageClassesValue{}.Type(ctx),
+ },
+ }
+}
+
+// Compile-time check that StorageClassesType satisfies basetypes.ObjectTypable.
+var _ basetypes.ObjectTypable = StorageClassesType{}
+
+// StorageClassesType is the custom attr.Type for one element of the
+// "storage_classes" list nested under a flavor.
+type StorageClassesType struct {
+ basetypes.ObjectType
+}
+
+// Equal reports whether o is also a StorageClassesType with an equal embedded ObjectType.
+func (t StorageClassesType) Equal(o attr.Type) bool {
+ if other, ok := o.(StorageClassesType); ok {
+ return t.ObjectType.Equal(other.ObjectType)
+ }
+ return false
+}
+
+// String returns a fixed human-readable name for this type.
+func (t StorageClassesType) String() string {
+ return "StorageClassesType"
+}
+
+// ValueFromObject converts a generic ObjectValue into a StorageClassesValue,
+// validating that every expected attribute is present and correctly typed.
+// Missing attributes abort immediately; wrong types are accumulated in diags
+// and checked together before the value is built.
+func (t StorageClassesType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ attributes := in.Attributes()
+
+ classAttribute, ok := attributes["class"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `class is missing from object`)
+
+ return nil, diags
+ }
+
+ classVal, ok := classAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`class expected to be basetypes.StringValue, was: %T`, classAttribute))
+ }
+
+ maxIoPerSecAttribute, ok := attributes["max_io_per_sec"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `max_io_per_sec is missing from object`)
+
+ return nil, diags
+ }
+
+ maxIoPerSecVal, ok := maxIoPerSecAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`max_io_per_sec expected to be basetypes.Int64Value, was: %T`, maxIoPerSecAttribute))
+ }
+
+ maxThroughInMbAttribute, ok := attributes["max_through_in_mb"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `max_through_in_mb is missing from object`)
+
+ return nil, diags
+ }
+
+ maxThroughInMbVal, ok := maxThroughInMbAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`max_through_in_mb expected to be basetypes.Int64Value, was: %T`, maxThroughInMbAttribute))
+ }
+
+ if diags.HasError() {
+ return nil, diags
+ }
+
+ return StorageClassesValue{
+ Class: classVal,
+ MaxIoPerSec: maxIoPerSecVal,
+ MaxThroughInMb: maxThroughInMbVal,
+ state: attr.ValueStateKnown,
+ }, diags
+}
+
+// NewStorageClassesValueNull returns a StorageClassesValue in the null state.
+func NewStorageClassesValueNull() StorageClassesValue {
+ return StorageClassesValue{state: attr.ValueStateNull}
+}
+
+// NewStorageClassesValueUnknown returns a StorageClassesValue in the unknown state.
+func NewStorageClassesValueUnknown() StorageClassesValue {
+ return StorageClassesValue{state: attr.ValueStateUnknown}
+}
+
+// NewStorageClassesValue builds a known StorageClassesValue from the given
+// attribute types and values. It first cross-checks the two maps (missing,
+// mismatched, or extra entries produce diagnostics), then type-asserts each
+// attribute. On any error it returns an unknown value with the diagnostics.
+func NewStorageClassesValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (StorageClassesValue, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521
+ ctx := context.Background()
+
+ for name, attributeType := range attributeTypes {
+ attribute, ok := attributes[name]
+
+ if !ok {
+ diags.AddError(
+ "Missing StorageClassesValue Attribute Value",
+ "While creating a StorageClassesValue value, a missing attribute value was detected. "+
+ "A StorageClassesValue must contain values for all attributes, even if null or unknown. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("StorageClassesValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()),
+ )
+
+ continue
+ }
+
+ if !attributeType.Equal(attribute.Type(ctx)) {
+ diags.AddError(
+ "Invalid StorageClassesValue Attribute Type",
+ "While creating a StorageClassesValue value, an invalid attribute value was detected. "+
+ "A StorageClassesValue must use a matching attribute type for the value. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("StorageClassesValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+
+ fmt.Sprintf("StorageClassesValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)),
+ )
+ }
+ }
+
+ for name := range attributes {
+ _, ok := attributeTypes[name]
+
+ if !ok {
+ diags.AddError(
+ "Extra StorageClassesValue Attribute Value",
+ "While creating a StorageClassesValue value, an extra attribute value was detected. "+
+ "A StorageClassesValue must not contain values beyond the expected attribute types. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("Extra StorageClassesValue Attribute Name: %s", name),
+ )
+ }
+ }
+
+ if diags.HasError() {
+ return NewStorageClassesValueUnknown(), diags
+ }
+
+ classAttribute, ok := attributes["class"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `class is missing from object`)
+
+ return NewStorageClassesValueUnknown(), diags
+ }
+
+ classVal, ok := classAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`class expected to be basetypes.StringValue, was: %T`, classAttribute))
+ }
+
+ maxIoPerSecAttribute, ok := attributes["max_io_per_sec"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `max_io_per_sec is missing from object`)
+
+ return NewStorageClassesValueUnknown(), diags
+ }
+
+ maxIoPerSecVal, ok := maxIoPerSecAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`max_io_per_sec expected to be basetypes.Int64Value, was: %T`, maxIoPerSecAttribute))
+ }
+
+ maxThroughInMbAttribute, ok := attributes["max_through_in_mb"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `max_through_in_mb is missing from object`)
+
+ return NewStorageClassesValueUnknown(), diags
+ }
+
+ maxThroughInMbVal, ok := maxThroughInMbAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`max_through_in_mb expected to be basetypes.Int64Value, was: %T`, maxThroughInMbAttribute))
+ }
+
+ if diags.HasError() {
+ return NewStorageClassesValueUnknown(), diags
+ }
+
+ return StorageClassesValue{
+ Class: classVal,
+ MaxIoPerSec: maxIoPerSecVal,
+ MaxThroughInMb: maxThroughInMbVal,
+ state: attr.ValueStateKnown,
+ }, diags
+}
+
+// NewStorageClassesValueMust is the panicking variant of NewStorageClassesValue,
+// intended for use with statically known-good inputs.
+func NewStorageClassesValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) StorageClassesValue {
+ object, diags := NewStorageClassesValue(attributeTypes, attributes)
+
+ if diags.HasError() {
+ // This could potentially be added to the diag package.
+ msgs := make([]string, 0, len(diags))
+ for _, d := range diags {
+ msgs = append(msgs, fmt.Sprintf("%s | %s | %s", d.Severity(), d.Summary(), d.Detail()))
+ }
+ panic("NewStorageClassesValueMust received error(s): " + strings.Join(msgs, "\n"))
+ }
+
+ return object
+}
+
+// ValueFromTerraform converts a raw tftypes.Value into a StorageClassesValue,
+// mapping nil type / unknown / null inputs to the corresponding value states
+// and otherwise converting each attribute via its declared attr.Type.
+func (t StorageClassesType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) {
+ if in.Type() == nil {
+ return NewStorageClassesValueNull(), nil
+ }
+
+ if !in.Type().Equal(t.TerraformType(ctx)) {
+ return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type())
+ }
+
+ if !in.IsKnown() {
+ return NewStorageClassesValueUnknown(), nil
+ }
+
+ if in.IsNull() {
+ return NewStorageClassesValueNull(), nil
+ }
+
+ attributes := map[string]attr.Value{}
+
+ val := map[string]tftypes.Value{}
+
+ err := in.As(&val)
+
+ if err != nil {
+ return nil, err
+ }
+
+ for k, v := range val {
+ a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v)
+
+ if err != nil {
+ return nil, err
+ }
+
+ attributes[k] = a
+ }
+
+ return NewStorageClassesValueMust(StorageClassesValue{}.AttributeTypes(ctx), attributes), nil
+}
+
+// ValueType returns the zero StorageClassesValue this type produces.
+func (t StorageClassesType) ValueType(ctx context.Context) attr.Value {
+ return StorageClassesValue{}
+}
+
+// Compile-time check that StorageClassesValue satisfies basetypes.ObjectValuable.
+var _ basetypes.ObjectValuable = StorageClassesValue{}
+
+// StorageClassesValue holds one element of a flavor's "storage_classes" list;
+// state tracks known/null/unknown.
+type StorageClassesValue struct {
+ Class basetypes.StringValue `tfsdk:"class"`
+ MaxIoPerSec basetypes.Int64Value `tfsdk:"max_io_per_sec"`
+ MaxThroughInMb basetypes.Int64Value `tfsdk:"max_through_in_mb"`
+ state attr.ValueState
+}
+
+// ToTerraformValue converts the StorageClassesValue into its tftypes.Value
+// wire representation. Known values are converted field by field; null and
+// unknown states map to the corresponding tftypes values.
+func (v StorageClassesValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
+ attrTypes := make(map[string]tftypes.Type, 3)
+
+ var val tftypes.Value
+ var err error
+
+ attrTypes["class"] = basetypes.StringType{}.TerraformType(ctx)
+ attrTypes["max_io_per_sec"] = basetypes.Int64Type{}.TerraformType(ctx)
+ attrTypes["max_through_in_mb"] = basetypes.Int64Type{}.TerraformType(ctx)
+
+ objectType := tftypes.Object{AttributeTypes: attrTypes}
+
+ switch v.state {
+ case attr.ValueStateKnown:
+ vals := make(map[string]tftypes.Value, 3)
+
+ val, err = v.Class.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["class"] = val
+
+ val, err = v.MaxIoPerSec.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["max_io_per_sec"] = val
+
+ val, err = v.MaxThroughInMb.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["max_through_in_mb"] = val
+
+ if err := tftypes.ValidateValue(objectType, vals); err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ return tftypes.NewValue(objectType, vals), nil
+ case attr.ValueStateNull:
+ return tftypes.NewValue(objectType, nil), nil
+ case attr.ValueStateUnknown:
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), nil
+ default:
+ panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state))
+ }
+}
+
+func (v StorageClassesValue) IsNull() bool { // true when the value was created as null
+	return v.state == attr.ValueStateNull
+}
+
+func (v StorageClassesValue) IsUnknown() bool { // true when the value was created as unknown
+	return v.state == attr.ValueStateUnknown
+}
+
+func (v StorageClassesValue) String() string { // fixed name; does not render the contained attribute values
+	return "StorageClassesValue"
+}
+
+func (v StorageClassesValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) { // converts to a generic ObjectValue, preserving null/unknown state
+	var diags diag.Diagnostics
+
+	attributeTypes := map[string]attr.Type{
+		"class":             basetypes.StringType{},
+		"max_io_per_sec":    basetypes.Int64Type{},
+		"max_through_in_mb": basetypes.Int64Type{},
+	}
+
+	if v.IsNull() {
+		return types.ObjectNull(attributeTypes), diags
+	}
+
+	if v.IsUnknown() {
+		return types.ObjectUnknown(attributeTypes), diags
+	}
+
+	objVal, diags := types.ObjectValue( // known value: build the object from the individual fields
+		attributeTypes,
+		map[string]attr.Value{
+			"class":             v.Class,
+			"max_io_per_sec":    v.MaxIoPerSec,
+			"max_through_in_mb": v.MaxThroughInMb,
+		})
+
+	return objVal, diags
+}
+
+func (v StorageClassesValue) Equal(o attr.Value) bool { // deep equality: same concrete type, same state, and (if known) equal attributes
+	other, ok := o.(StorageClassesValue)
+
+	if !ok {
+		return false
+	}
+
+	if v.state != other.state {
+		return false
+	}
+
+	if v.state != attr.ValueStateKnown { // two nulls or two unknowns compare equal without field checks
+		return true
+	}
+
+	if !v.Class.Equal(other.Class) {
+		return false
+	}
+
+	if !v.MaxIoPerSec.Equal(other.MaxIoPerSec) {
+		return false
+	}
+
+	if !v.MaxThroughInMb.Equal(other.MaxThroughInMb) {
+		return false
+	}
+
+	return true
+}
+
+func (v StorageClassesValue) Type(ctx context.Context) attr.Type { // returns the matching StorageClassesType for this value
+	return StorageClassesType{
+		basetypes.ObjectType{
+			AttrTypes: v.AttributeTypes(ctx),
+		},
+	}
+}
+
+func (v StorageClassesValue) AttributeTypes(ctx context.Context) map[string]attr.Type { // canonical attribute-name -> framework type mapping
+	return map[string]attr.Type{
+		"class":             basetypes.StringType{},
+		"max_io_per_sec":    basetypes.Int64Type{},
+		"max_through_in_mb": basetypes.Int64Type{},
+	}
+}
+
+var _ basetypes.ObjectTypable = PaginationType{} // compile-time check: PaginationType implements basetypes.ObjectTypable
+
+type PaginationType struct { // custom object type wrapping the base ObjectType for pagination attributes
+	basetypes.ObjectType
+}
+
+func (t PaginationType) Equal(o attr.Type) bool { // equal only to another PaginationType with an equal embedded ObjectType
+	other, ok := o.(PaginationType)
+
+	if !ok {
+		return false
+	}
+
+	return t.ObjectType.Equal(other.ObjectType)
+}
+
+func (t PaginationType) String() string { // fixed type name
+	return "PaginationType"
+}
+
+func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) { // builds a known PaginationValue from a generic ObjectValue, validating each attribute's presence and concrete type
+	var diags diag.Diagnostics
+
+	attributes := in.Attributes()
+
+	pageAttribute, ok := attributes["page"]
+
+	if !ok { // a missing attribute aborts immediately
+		diags.AddError(
+			"Attribute Missing",
+			`page is missing from object`)
+
+		return nil, diags
+	}
+
+	pageVal, ok := pageAttribute.(basetypes.Int64Value)
+
+	if !ok { // a wrong-typed attribute only accumulates; checked via diags.HasError() below
+		diags.AddError(
+			"Attribute Wrong Type",
+			fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute))
+	}
+
+	sizeAttribute, ok := attributes["size"]
+
+	if !ok {
+		diags.AddError(
+			"Attribute Missing",
+			`size is missing from object`)
+
+		return nil, diags
+	}
+
+	sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
+
+	if !ok {
+		diags.AddError(
+			"Attribute Wrong Type",
+			fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
+	}
+
+	sortAttribute, ok := attributes["sort"]
+
+	if !ok {
+		diags.AddError(
+			"Attribute Missing",
+			`sort is missing from object`)
+
+		return nil, diags
+	}
+
+	sortVal, ok := sortAttribute.(basetypes.StringValue)
+
+	if !ok {
+		diags.AddError(
+			"Attribute Wrong Type",
+			fmt.Sprintf(`sort expected to be basetypes.StringValue, was: %T`, sortAttribute))
+	}
+
+	totalPagesAttribute, ok := attributes["total_pages"]
+
+	if !ok {
+		diags.AddError(
+			"Attribute Missing",
+			`total_pages is missing from object`)
+
+		return nil, diags
+	}
+
+	totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value)
+
+	if !ok {
+		diags.AddError(
+			"Attribute Wrong Type",
+			fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute))
+	}
+
+	totalRowsAttribute, ok := attributes["total_rows"]
+
+	if !ok {
+		diags.AddError(
+			"Attribute Missing",
+			`total_rows is missing from object`)
+
+		return nil, diags
+	}
+
+	totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value)
+
+	if !ok {
+		diags.AddError(
+			"Attribute Wrong Type",
+			fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute))
+	}
+
+	if diags.HasError() { // any wrong-type diagnostic from above stops construction here
+		return nil, diags
+	}
+
+	return PaginationValue{
+		Page:       pageVal,
+		Size:       sizeVal,
+		Sort:       sortVal,
+		TotalPages: totalPagesVal,
+		TotalRows:  totalRowsVal,
+		state:      attr.ValueStateKnown,
+	}, diags
+}
+
+func NewPaginationValueNull() PaginationValue { // constructs a null PaginationValue
+	return PaginationValue{
+		state: attr.ValueStateNull,
+	}
+}
+
+func NewPaginationValueUnknown() PaginationValue { // constructs an unknown PaginationValue
+	return PaginationValue{
+		state: attr.ValueStateUnknown,
+	}
+}
+
+func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (PaginationValue, diag.Diagnostics) { // validated constructor: checks for missing, mistyped, and extra attributes before building a known value
+	var diags diag.Diagnostics
+
+	// Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521
+	ctx := context.Background()
+
+	for name, attributeType := range attributeTypes { // pass 1: every expected attribute must be present with a matching type
+		attribute, ok := attributes[name]
+
+		if !ok {
+			diags.AddError(
+				"Missing PaginationValue Attribute Value",
+				"While creating a PaginationValue value, a missing attribute value was detected. "+
+					"A PaginationValue must contain values for all attributes, even if null or unknown. "+
+					"This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+					fmt.Sprintf("PaginationValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()),
+			)
+
+			continue
+		}
+
+		if !attributeType.Equal(attribute.Type(ctx)) {
+			diags.AddError(
+				"Invalid PaginationValue Attribute Type",
+				"While creating a PaginationValue value, an invalid attribute value was detected. "+
+					"A PaginationValue must use a matching attribute type for the value. "+
+					"This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+					fmt.Sprintf("PaginationValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+
+					fmt.Sprintf("PaginationValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)),
+			)
+		}
+	}
+
+	for name := range attributes { // pass 2: reject attributes that are not part of the expected set
+		_, ok := attributeTypes[name]
+
+		if !ok {
+			diags.AddError(
+				"Extra PaginationValue Attribute Value",
+				"While creating a PaginationValue value, an extra attribute value was detected. "+
+					"A PaginationValue must not contain values beyond the expected attribute types. "+
+					"This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+					fmt.Sprintf("Extra PaginationValue Attribute Name: %s", name),
+			)
+		}
+	}
+
+	if diags.HasError() { // on any structural problem, fall back to an unknown value
+		return NewPaginationValueUnknown(), diags
+	}
+
+	pageAttribute, ok := attributes["page"]
+
+	if !ok {
+		diags.AddError(
+			"Attribute Missing",
+			`page is missing from object`)
+
+		return NewPaginationValueUnknown(), diags
+	}
+
+	pageVal, ok := pageAttribute.(basetypes.Int64Value)
+
+	if !ok { // wrong-type diagnostics accumulate; checked again before the final return
+		diags.AddError(
+			"Attribute Wrong Type",
+			fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute))
+	}
+
+	sizeAttribute, ok := attributes["size"]
+
+	if !ok {
+		diags.AddError(
+			"Attribute Missing",
+			`size is missing from object`)
+
+		return NewPaginationValueUnknown(), diags
+	}
+
+	sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
+
+	if !ok {
+		diags.AddError(
+			"Attribute Wrong Type",
+			fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
+	}
+
+	sortAttribute, ok := attributes["sort"]
+
+	if !ok {
+		diags.AddError(
+			"Attribute Missing",
+			`sort is missing from object`)
+
+		return NewPaginationValueUnknown(), diags
+	}
+
+	sortVal, ok := sortAttribute.(basetypes.StringValue)
+
+	if !ok {
+		diags.AddError(
+			"Attribute Wrong Type",
+			fmt.Sprintf(`sort expected to be basetypes.StringValue, was: %T`, sortAttribute))
+	}
+
+	totalPagesAttribute, ok := attributes["total_pages"]
+
+	if !ok {
+		diags.AddError(
+			"Attribute Missing",
+			`total_pages is missing from object`)
+
+		return NewPaginationValueUnknown(), diags
+	}
+
+	totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value)
+
+	if !ok {
+		diags.AddError(
+			"Attribute Wrong Type",
+			fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute))
+	}
+
+	totalRowsAttribute, ok := attributes["total_rows"]
+
+	if !ok {
+		diags.AddError(
+			"Attribute Missing",
+			`total_rows is missing from object`)
+
+		return NewPaginationValueUnknown(), diags
+	}
+
+	totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value)
+
+	if !ok {
+		diags.AddError(
+			"Attribute Wrong Type",
+			fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute))
+	}
+
+	if diags.HasError() {
+		return NewPaginationValueUnknown(), diags
+	}
+
+	return PaginationValue{
+		Page:       pageVal,
+		Size:       sizeVal,
+		Sort:       sortVal,
+		TotalPages: totalPagesVal,
+		TotalRows:  totalRowsVal,
+		state:      attr.ValueStateKnown,
+	}, diags
+}
+
+func NewPaginationValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) PaginationValue { // like NewPaginationValue, but panics if any diagnostic error occurs
+	object, diags := NewPaginationValue(attributeTypes, attributes)
+
+	if diags.HasError() {
+		// This could potentially be added to the diag package.
+		diagsStrings := make([]string, 0, len(diags))
+
+		for _, diagnostic := range diags { // flatten each diagnostic into "severity | summary | detail"
+			diagsStrings = append(diagsStrings, fmt.Sprintf(
+				"%s | %s | %s",
+				diagnostic.Severity(),
+				diagnostic.Summary(),
+				diagnostic.Detail()))
+		}
+
+		panic("NewPaginationValueMust received error(s): " + strings.Join(diagsStrings, "\n"))
+	}
+
+	return object
+}
+
+func (t PaginationType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) { // converts a raw tftypes.Value into a PaginationValue, handling nil-typed, unknown, and null inputs
+	if in.Type() == nil {
+		return NewPaginationValueNull(), nil
+	}
+
+	if !in.Type().Equal(t.TerraformType(ctx)) { // reject values whose wire type does not match this object type
+		return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type())
+	}
+
+	if !in.IsKnown() {
+		return NewPaginationValueUnknown(), nil
+	}
+
+	if in.IsNull() {
+		return NewPaginationValueNull(), nil
+	}
+
+	attributes := map[string]attr.Value{}
+
+	val := map[string]tftypes.Value{}
+
+	err := in.As(&val)
+
+	if err != nil {
+		return nil, err
+	}
+
+	for k, v := range val { // convert each raw attribute via its declared framework type
+		a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v)
+
+		if err != nil {
+			return nil, err
+		}
+
+		attributes[k] = a
+	}
+
+	return NewPaginationValueMust(PaginationValue{}.AttributeTypes(ctx), attributes), nil // Must is safe: attributes were built from the validated wire value
+}
+
+func (t PaginationType) ValueType(ctx context.Context) attr.Value { // ValueType returns the zero attr.Value this type produces.
+	return PaginationValue{}
+}
+
+var _ basetypes.ObjectValuable = PaginationValue{} // compile-time check: PaginationValue implements basetypes.ObjectValuable
+
+type PaginationValue struct { // struct-backed object value; state distinguishes known/null/unknown
+	Page       basetypes.Int64Value  `tfsdk:"page"`
+	Size       basetypes.Int64Value  `tfsdk:"size"`
+	Sort       basetypes.StringValue `tfsdk:"sort"`
+	TotalPages basetypes.Int64Value  `tfsdk:"total_pages"`
+	TotalRows  basetypes.Int64Value  `tfsdk:"total_rows"`
+	state      attr.ValueState // unexported: not part of the schema
+}
+
+func (v PaginationValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) { // converts the value into its raw tftypes.Value form
+	attrTypes := make(map[string]tftypes.Type, 5)
+
+	var val tftypes.Value
+	var err error
+
+	attrTypes["page"] = basetypes.Int64Type{}.TerraformType(ctx)
+	attrTypes["size"] = basetypes.Int64Type{}.TerraformType(ctx)
+	attrTypes["sort"] = basetypes.StringType{}.TerraformType(ctx)
+	attrTypes["total_pages"] = basetypes.Int64Type{}.TerraformType(ctx)
+	attrTypes["total_rows"] = basetypes.Int64Type{}.TerraformType(ctx)
+
+	objectType := tftypes.Object{AttributeTypes: attrTypes}
+
+	switch v.state {
+	case attr.ValueStateKnown: // known: convert each attribute individually, then validate the assembled object
+		vals := make(map[string]tftypes.Value, 5)
+
+		val, err = v.Page.ToTerraformValue(ctx)
+
+		if err != nil {
+			return tftypes.NewValue(objectType, tftypes.UnknownValue), err // return an unknown object alongside the error
+		}
+
+		vals["page"] = val
+
+		val, err = v.Size.ToTerraformValue(ctx)
+
+		if err != nil {
+			return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+		}
+
+		vals["size"] = val
+
+		val, err = v.Sort.ToTerraformValue(ctx)
+
+		if err != nil {
+			return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+		}
+
+		vals["sort"] = val
+
+		val, err = v.TotalPages.ToTerraformValue(ctx)
+
+		if err != nil {
+			return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+		}
+
+		vals["total_pages"] = val
+
+		val, err = v.TotalRows.ToTerraformValue(ctx)
+
+		if err != nil {
+			return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+		}
+
+		vals["total_rows"] = val
+
+		if err := tftypes.ValidateValue(objectType, vals); err != nil {
+			return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+		}
+
+		return tftypes.NewValue(objectType, vals), nil
+	case attr.ValueStateNull:
+		return tftypes.NewValue(objectType, nil), nil
+	case attr.ValueStateUnknown:
+		return tftypes.NewValue(objectType, tftypes.UnknownValue), nil
+	default:
+		panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state)) // unreachable unless a new ValueState is introduced
+	}
+}
+
+func (v PaginationValue) IsNull() bool { // true when the value was created as null
+	return v.state == attr.ValueStateNull
+}
+
+func (v PaginationValue) IsUnknown() bool { // true when the value was created as unknown
+	return v.state == attr.ValueStateUnknown
+}
+
+func (v PaginationValue) String() string { // fixed name; does not render the contained attribute values
+	return "PaginationValue"
+}
+
+func (v PaginationValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) { // converts to a generic ObjectValue, preserving null/unknown state
+	var diags diag.Diagnostics
+
+	attributeTypes := map[string]attr.Type{
+		"page":        basetypes.Int64Type{},
+		"size":        basetypes.Int64Type{},
+		"sort":        basetypes.StringType{},
+		"total_pages": basetypes.Int64Type{},
+		"total_rows":  basetypes.Int64Type{},
+	}
+
+	if v.IsNull() {
+		return types.ObjectNull(attributeTypes), diags
+	}
+
+	if v.IsUnknown() {
+		return types.ObjectUnknown(attributeTypes), diags
+	}
+
+	objVal, diags := types.ObjectValue( // known value: build the object from the individual fields
+		attributeTypes,
+		map[string]attr.Value{
+			"page":        v.Page,
+			"size":        v.Size,
+			"sort":        v.Sort,
+			"total_pages": v.TotalPages,
+			"total_rows":  v.TotalRows,
+		})
+
+	return objVal, diags
+}
+
+func (v PaginationValue) Equal(o attr.Value) bool { // deep equality: same concrete type, same state, and (if known) equal attributes
+	other, ok := o.(PaginationValue)
+
+	if !ok {
+		return false
+	}
+
+	if v.state != other.state {
+		return false
+	}
+
+	if v.state != attr.ValueStateKnown { // two nulls or two unknowns compare equal without field checks
+		return true
+	}
+
+	if !v.Page.Equal(other.Page) {
+		return false
+	}
+
+	if !v.Size.Equal(other.Size) {
+		return false
+	}
+
+	if !v.Sort.Equal(other.Sort) {
+		return false
+	}
+
+	if !v.TotalPages.Equal(other.TotalPages) {
+		return false
+	}
+
+	if !v.TotalRows.Equal(other.TotalRows) {
+		return false
+	}
+
+	return true
+}
+
+func (v PaginationValue) Type(ctx context.Context) attr.Type { // returns the matching PaginationType for this value
+	return PaginationType{
+		basetypes.ObjectType{
+			AttrTypes: v.AttributeTypes(ctx),
+		},
+	}
+}
+
+func (v PaginationValue) AttributeTypes(ctx context.Context) map[string]attr.Type { // canonical attribute-name -> framework type mapping
+	return map[string]attr.Type{
+		"page":        basetypes.Int64Type{},
+		"size":        basetypes.Int64Type{},
+		"sort":        basetypes.StringType{},
+		"total_pages": basetypes.Int64Type{},
+		"total_rows":  basetypes.Int64Type{},
+	}
+}
diff --git a/stackit/internal/services/sqlserverflexbeta/instance/datasource.go b/stackit/internal/services/sqlserverflexbeta/instance/datasource.go
index 842a4cfd..85834b26 100644
--- a/stackit/internal/services/sqlserverflexbeta/instance/datasource.go
+++ b/stackit/internal/services/sqlserverflexbeta/instance/datasource.go
@@ -6,6 +6,7 @@ import (
"net/http"
"github.com/hashicorp/terraform-plugin-framework/datasource"
+ "github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-log/tflog"
"github.com/stackitcloud/stackit-sdk-go/core/config"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
@@ -25,12 +26,22 @@ func NewInstanceDataSource() datasource.DataSource {
return &instanceDataSource{}
}
+// dataSourceModel maps the data source schema data.
+type dataSourceModel struct {
+	sqlserverflexbetaGen.InstanceModel
+	TerraformID types.String `tfsdk:"id"` // Terraform-internal id; the embedded generated model's Id field now binds to "tf_original_api_id"
+}
+
type instanceDataSource struct {
client *sqlserverflexbetaPkg.APIClient
providerData core.ProviderData
}
-func (d *instanceDataSource) Metadata(_ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
+func (d *instanceDataSource) Metadata(
+ _ context.Context,
+ req datasource.MetadataRequest,
+ resp *datasource.MetadataResponse,
+) {
resp.TypeName = req.ProviderTypeName + "_sqlserverflexbeta_instance"
}
@@ -81,7 +92,7 @@ func (d *instanceDataSource) Configure(
}
func (d *instanceDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
- var data sqlserverflexbetaGen.InstanceModel
+ var data dataSourceModel
// Read Terraform configuration data into the model
resp.Diagnostics.Append(req.Config.Get(ctx, &data)...)
diff --git a/stackit/internal/services/sqlserverflexbeta/instance/datasources_gen/instance_data_source_gen.go b/stackit/internal/services/sqlserverflexbeta/instance/datasources_gen/instance_data_source_gen.go
index 87476c3c..f3226581 100644
--- a/stackit/internal/services/sqlserverflexbeta/instance/datasources_gen/instance_data_source_gen.go
+++ b/stackit/internal/services/sqlserverflexbeta/instance/datasources_gen/instance_data_source_gen.go
@@ -65,7 +65,7 @@ func InstanceDataSourceSchema(ctx context.Context) schema.Schema {
Description: "The id of the instance flavor.",
MarkdownDescription: "The id of the instance flavor.",
},
- "id": schema.StringAttribute{
+ "tf_original_api_id": schema.StringAttribute{
Computed: true,
Description: "The ID of the instance.",
MarkdownDescription: "The ID of the instance.",
@@ -178,7 +178,7 @@ type InstanceModel struct {
Edition types.String `tfsdk:"edition"`
Encryption EncryptionValue `tfsdk:"encryption"`
FlavorId types.String `tfsdk:"flavor_id"`
- Id types.String `tfsdk:"id"`
+ Id types.String `tfsdk:"tf_original_api_id"`
InstanceId types.String `tfsdk:"instance_id"`
IsDeletable types.Bool `tfsdk:"is_deletable"`
Name types.String `tfsdk:"name"`
diff --git a/stackit/internal/services/sqlserverflexbeta/instance/functions.go b/stackit/internal/services/sqlserverflexbeta/instance/functions.go
index e9e1db57..25f3af0c 100644
--- a/stackit/internal/services/sqlserverflexbeta/instance/functions.go
+++ b/stackit/internal/services/sqlserverflexbeta/instance/functions.go
@@ -84,7 +84,7 @@ func mapResponseToModel(
func mapDataResponseToModel(
ctx context.Context,
resp *sqlserverflexbeta.GetInstanceResponse,
- m *sqlserverflexbetaDataGen.InstanceModel,
+ m *dataSourceModel,
tfDiags diag.Diagnostics,
) error {
m.BackupSchedule = types.StringValue(resp.GetBackupSchedule())
@@ -181,7 +181,7 @@ func handleEncryption(
}
func handleDSEncryption(
- m *sqlserverflexbetaDataGen.InstanceModel,
+ m *dataSourceModel,
resp *sqlserverflexbeta.GetInstanceResponse,
) sqlserverflexbetaDataGen.EncryptionValue {
if !resp.HasEncryption() ||
diff --git a/stackit/internal/services/sqlserverflexbeta/instance/resource.go b/stackit/internal/services/sqlserverflexbeta/instance/resource.go
index 5d0b47d8..5ccfa3c0 100644
--- a/stackit/internal/services/sqlserverflexbeta/instance/resource.go
+++ b/stackit/internal/services/sqlserverflexbeta/instance/resource.go
@@ -43,21 +43,48 @@ type instanceResource struct {
providerData core.ProviderData
}
+// resourceModel describes the resource data model.
+type resourceModel = sqlserverflexbetaResGen.InstanceModel
+
type InstanceResourceIdentityModel struct {
ProjectID types.String `tfsdk:"project_id"`
Region types.String `tfsdk:"region"`
InstanceID types.String `tfsdk:"instance_id"`
}
-func (r *instanceResource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
+func (r *instanceResource) Metadata(
+ ctx context.Context,
+ req resource.MetadataRequest,
+ resp *resource.MetadataResponse,
+) {
resp.TypeName = req.ProviderTypeName + "_sqlserverflexbeta_instance"
}
+//go:embed planModifiers.yaml
+var modifiersFileByte []byte
+
func (r *instanceResource) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) {
- resp.Schema = sqlserverflexbetaResGen.InstanceResourceSchema(ctx)
+ s := sqlserverflexbetaResGen.InstanceResourceSchema(ctx)
+
+ fields, err := utils.ReadModifiersConfig(modifiersFileByte)
+ if err != nil {
+ resp.Diagnostics.AddError("error during read modifiers config file", err.Error())
+ return
+ }
+
+ err = utils.AddPlanModifiersToResourceSchema(fields, &s)
+ if err != nil {
+ resp.Diagnostics.AddError("error adding plan modifiers", err.Error())
+ return
+ }
+ resp.Schema = s
}
-func (r *instanceResource) IdentitySchema(_ context.Context, _ resource.IdentitySchemaRequest, resp *resource.IdentitySchemaResponse) {
+func (r *instanceResource) IdentitySchema(
+ _ context.Context,
+ _ resource.IdentitySchemaRequest,
+ resp *resource.IdentitySchemaResponse,
+) {
resp.IdentitySchema = identityschema.Schema{
Attributes: map[string]identityschema.Attribute{
"project_id": identityschema.StringAttribute{
@@ -90,7 +117,10 @@ func (r *instanceResource) Configure(
utils.UserAgentConfigOption(r.providerData.Version),
}
if r.providerData.SQLServerFlexCustomEndpoint != "" {
- apiClientConfigOptions = append(apiClientConfigOptions, config.WithEndpoint(r.providerData.SQLServerFlexCustomEndpoint))
+ apiClientConfigOptions = append(
+ apiClientConfigOptions,
+ config.WithEndpoint(r.providerData.SQLServerFlexCustomEndpoint),
+ )
} else {
apiClientConfigOptions = append(apiClientConfigOptions, config.WithRegion(r.providerData.GetRegion()))
}
@@ -121,7 +151,7 @@ func (r *instanceResource) ModifyPlan(
if req.Config.Raw.IsNull() {
return
}
- var configModel sqlserverflexbetaResGen.InstanceModel
+ var configModel resourceModel
resp.Diagnostics.Append(req.Config.Get(ctx, &configModel)...)
if resp.Diagnostics.HasError() {
return
@@ -147,11 +177,8 @@ func (r *instanceResource) ModifyPlan(
}
}
-//go:embed planModifiers.yaml
-var modifiersFileByte []byte
-
func (r *instanceResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) {
- var data sqlserverflexbetaResGen.InstanceModel
+ var data resourceModel
crateErr := "[SQL Server Flex BETA - Create] error"
// Read Terraform plan data into the model
@@ -257,7 +284,7 @@ func (r *instanceResource) Create(ctx context.Context, req resource.CreateReques
}
func (r *instanceResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) {
- var data sqlserverflexbetaResGen.InstanceModel
+ var data resourceModel
// Read Terraform prior state data into the model
resp.Diagnostics.Append(req.State.Get(ctx, &data)...)
@@ -324,7 +351,7 @@ func (r *instanceResource) Read(ctx context.Context, req resource.ReadRequest, r
}
func (r *instanceResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) {
- var data sqlserverflexbetaResGen.InstanceModel
+ var data resourceModel
updateInstanceError := "Error updating instance"
resp.Diagnostics.Append(req.Plan.Get(ctx, &data)...)
@@ -411,7 +438,7 @@ func (r *instanceResource) Update(ctx context.Context, req resource.UpdateReques
}
func (r *instanceResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) {
- var data sqlserverflexbetaResGen.InstanceModel
+ var data resourceModel
// Read Terraform prior state data into the model
resp.Diagnostics.Append(req.State.Get(ctx, &data)...)
@@ -484,9 +511,13 @@ func (r *instanceResource) ImportState(
idParts := strings.Split(req.ID, core.Separator)
if len(idParts) != 3 || idParts[0] == "" || idParts[1] == "" || idParts[2] == "" {
- core.LogAndAddError(ctx, &resp.Diagnostics,
+ core.LogAndAddError(
+ ctx, &resp.Diagnostics,
"Error importing instance",
- fmt.Sprintf("Expected import identifier with format: [project_id],[region],[instance_id] Got: %q", req.ID),
+ fmt.Sprintf(
+ "Expected import identifier with format: [project_id],[region],[instance_id] Got: %q",
+ req.ID,
+ ),
)
return
}
@@ -497,25 +528,20 @@ func (r *instanceResource) ImportState(
return
}
+ // If no ID is provided, attempt to read identity attributes from the import configuration
var identityData InstanceResourceIdentityModel
resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...)
if resp.Diagnostics.HasError() {
return
}
- resp.Diagnostics.Append(
- resp.State.SetAttribute(
- ctx,
- path.Root("id"),
- utils.BuildInternalTerraformId(
- identityData.ProjectID.ValueString(),
- identityData.Region.ValueString(),
- identityData.InstanceID.ValueString(),
- ),
- )...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), identityData.ProjectID.ValueString())...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), identityData.Region.ValueString())...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), identityData.InstanceID.ValueString())...)
+ projectId := identityData.ProjectID.ValueString()
+ region := identityData.Region.ValueString()
+ instanceId := identityData.InstanceID.ValueString()
+
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), projectId)...)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), region)...)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), instanceId)...)
tflog.Info(ctx, "Sqlserverflexbeta instance state imported")
}
diff --git a/stackit/internal/services/sqlserverflexbeta/instance/resource_test.go b/stackit/internal/services/sqlserverflexbeta/instance/resource_test.go
index 64acf850..effced4e 100644
--- a/stackit/internal/services/sqlserverflexbeta/instance/resource_test.go
+++ b/stackit/internal/services/sqlserverflexbeta/instance/resource_test.go
@@ -13,6 +13,7 @@ import (
"github.com/hashicorp/terraform-plugin-testing/helper/acctest"
"github.com/hashicorp/terraform-plugin-testing/helper/resource"
"github.com/hashicorp/terraform-plugin-testing/knownvalue"
+ "github.com/hashicorp/terraform-plugin-testing/plancheck"
"github.com/hashicorp/terraform-plugin-testing/statecheck"
"github.com/hashicorp/terraform-plugin-testing/terraform"
"github.com/hashicorp/terraform-plugin-testing/tfjsonpath"
@@ -160,6 +161,12 @@ func TestAccResourceExample_basic(t *testing.T) {
// test create
{
Config: exBefore,
+ ConfigPlanChecks: resource.ConfigPlanChecks{
+ PreApply: []plancheck.PlanCheck{
+ plancheck.ExpectResourceAction(resName, plancheck.ResourceActionCreate),
+ plancheck.ExpectNonEmptyPlan(),
+ },
+ },
ConfigStateChecks: []statecheck.StateCheck{
compareValuesSame.AddStateValue(
resName,
diff --git a/stackit/internal/services/sqlserverflexbeta/user/datasource.go b/stackit/internal/services/sqlserverflexbeta/user/datasource.go
index df1a8033..e6491a0f 100644
--- a/stackit/internal/services/sqlserverflexbeta/user/datasource.go
+++ b/stackit/internal/services/sqlserverflexbeta/user/datasource.go
@@ -6,6 +6,7 @@ import (
"net/http"
"github.com/hashicorp/terraform-plugin-framework/datasource"
+ "github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-log/tflog"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
@@ -25,12 +26,30 @@ func NewUserDataSource() datasource.DataSource {
return &userDataSource{}
}
+type dataSourceModel struct { // flat model for the user data source; Id and UserId both receive the API user id in mapResponseToModel
+	DefaultDatabase types.String `tfsdk:"default_database"`
+	Host            types.String `tfsdk:"host"`
+	Id              types.Int64  `tfsdk:"id"`
+	InstanceId      types.String `tfsdk:"instance_id"`
+	Port            types.Int64  `tfsdk:"port"`
+	ProjectId       types.String `tfsdk:"project_id"`
+	Region          types.String `tfsdk:"region"`
+	Roles           types.List   `tfsdk:"roles"`
+	Status          types.String `tfsdk:"status"`
+	UserId          types.Int64  `tfsdk:"user_id"`
+	Username        types.String `tfsdk:"username"`
+}
+
type userDataSource struct {
client *sqlserverflexbetaPkg.APIClient
providerData core.ProviderData
}
-func (d *userDataSource) Metadata(_ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
+func (d *userDataSource) Metadata(
+ _ context.Context,
+ req datasource.MetadataRequest,
+ resp *datasource.MetadataResponse,
+) {
resp.TypeName = req.ProviderTypeName + "_sqlserverflexbeta_user"
}
@@ -59,7 +78,7 @@ func (d *userDataSource) Configure(
}
func (d *userDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
- var data sqlserverflexbetaGen.UserModel
+ var data dataSourceModel
// Read Terraform configuration data into the model
resp.Diagnostics.Append(req.Config.Get(ctx, &data)...)
@@ -72,13 +91,15 @@ func (d *userDataSource) Read(ctx context.Context, req datasource.ReadRequest, r
projectId := data.ProjectId.ValueString()
region := d.providerData.GetRegionWithOverride(data.Region)
- userId := data.UserId.ValueString()
+ instanceId := data.InstanceId.ValueString()
+ userId := data.UserId.ValueInt64()
ctx = tflog.SetField(ctx, "project_id", projectId)
ctx = tflog.SetField(ctx, "region", region)
+ ctx = tflog.SetField(ctx, "instance_id", instanceId)
ctx = tflog.SetField(ctx, "user_id", userId)
- userResp, err := d.client.GetUserRequest(ctx, projectId, region, userId).Execute()
+ userResp, err := d.client.GetUserRequest(ctx, projectId, region, instanceId, userId).Execute()
if err != nil {
utils.LogError(
ctx,
diff --git a/stackit/internal/services/sqlserverflexbeta/user/datasources_gen/user_data_source_gen.go b/stackit/internal/services/sqlserverflexbeta/user/datasources_gen/user_data_source_gen.go
index 1950c24e..34aef9ca 100644
--- a/stackit/internal/services/sqlserverflexbeta/user/datasources_gen/user_data_source_gen.go
+++ b/stackit/internal/services/sqlserverflexbeta/user/datasources_gen/user_data_source_gen.go
@@ -98,7 +98,7 @@ func UserDataSourceSchema(ctx context.Context) schema.Schema {
"users": schema.ListNestedAttribute{
NestedObject: schema.NestedAttributeObject{
Attributes: map[string]schema.Attribute{
- "id": schema.Int64Attribute{
+ "tf_original_api_id": schema.Int64Attribute{
Computed: true,
Description: "The ID of the user.",
MarkdownDescription: "The ID of the user.",
diff --git a/stackit/internal/services/sqlserverflexbeta/user/functions.go b/stackit/internal/services/sqlserverflexbeta/user/functions.go
index b565f761..83ce641f 100644
--- a/stackit/internal/services/sqlserverflexbeta/user/functions.go
+++ b/stackit/internal/services/sqlserverflexbeta/user/functions.go
@@ -3,13 +3,9 @@ package sqlserverflexbeta
import (
"context"
"fmt"
- "math"
- "github.com/hashicorp/terraform-plugin-framework/attr"
"github.com/hashicorp/terraform-plugin-framework/diag"
- "github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/types"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
sqlserverflexbeta "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta"
sqlserverflexbetaResGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/instance/resources_gen"
@@ -18,11 +14,39 @@ import (
func mapResponseToModel(
ctx context.Context,
resp *sqlserverflexbeta.GetUserResponse,
- m *sqlserverflexbetaResGen.UserModel,
+ m *dataSourceModel,
tfDiags diag.Diagnostics,
) error {
+ if resp == nil {
+ return fmt.Errorf("response is nil")
+ }
+
+ m.Id = types.Int64Value(resp.GetId())
+ m.UserId = types.Int64Value(resp.GetId())
+ m.Username = types.StringValue(resp.GetUsername())
+ m.Port = types.Int64Value(resp.GetPort())
+ m.Host = types.StringValue(resp.GetHost())
+ m.DefaultDatabase = types.StringValue(resp.GetDefaultDatabase())
+ m.Status = types.StringValue(resp.GetStatus())
+
+ if resp.Roles != nil {
+ roles, diags := types.ListValueFrom(ctx, types.StringType, *resp.Roles)
+ tfDiags.Append(diags...)
+ if tfDiags.HasError() {
+ return fmt.Errorf("failed to map roles")
+ }
+ m.Roles = roles
+ } else {
+ m.Roles = types.ListNull(types.StringType)
+ }
+
+ if resp.Status != nil {
+ m.Status = types.StringValue(*resp.Status)
+ } else {
+ m.Status = types.StringNull()
+ }
+
// TODO: complete and refactor
- m.Id = types.StringValue(resp.GetId())
/*
sampleList, diags := types.ListValueFrom(ctx, types.StringType, resp.GetList())
@@ -51,48 +75,63 @@ func mapResponseToModel(
return nil
}
+// TODO: handle encryption field mapping when API supports it
func handleEncryption(
- m *sqlserverflexbetaResGen.UserModel,
+ m *dataSourceModel,
resp *sqlserverflexbeta.GetUserResponse,
) sqlserverflexbetaResGen.EncryptionValue {
- if !resp.HasEncryption() ||
- resp.Encryption == nil ||
- resp.Encryption.KekKeyId == nil ||
- resp.Encryption.KekKeyRingId == nil ||
- resp.Encryption.KekKeyVersion == nil ||
- resp.Encryption.ServiceAccount == nil {
+ /*
+ if !resp.HasEncryption() ||
- if m.Encryption.IsNull() || m.Encryption.IsUnknown() {
- return sqlserverflexbetaResGen.NewEncryptionValueNull()
+ resp.Encryption == nil ||
+ resp.Encryption.KekKeyId == nil ||
+ resp.Encryption.KekKeyRingId == nil ||
+ resp.Encryption.KekKeyVersion == nil ||
+ resp.Encryption.ServiceAccount == nil {
+
+ if m.Encryption.IsNull() || m.Encryption.IsUnknown() {
+ return sqlserverflexbetaResGen.NewEncryptionValueNull()
+ }
+ return m.Encryption
}
- return m.Encryption
- }
- enc := sqlserverflexbetaResGen.NewEncryptionValueNull()
- if kVal, ok := resp.Encryption.GetKekKeyIdOk(); ok {
- enc.KekKeyId = types.StringValue(kVal)
- }
- if kkVal, ok := resp.Encryption.GetKekKeyRingIdOk(); ok {
- enc.KekKeyRingId = types.StringValue(kkVal)
- }
- if kkvVal, ok := resp.Encryption.GetKekKeyVersionOk(); ok {
- enc.KekKeyVersion = types.StringValue(kkvVal)
- }
- if sa, ok := resp.Encryption.GetServiceAccountOk(); ok {
- enc.ServiceAccount = types.StringValue(sa)
- }
- return enc
+ enc := sqlserverflexbetaResGen.NewEncryptionValueNull()
+ if kVal, ok := resp.Encryption.GetKekKeyIdOk(); ok {
+ enc.KekKeyId = types.StringValue(kVal)
+ }
+ if kkVal, ok := resp.Encryption.GetKekKeyRingIdOk(); ok {
+ enc.KekKeyRingId = types.StringValue(kkVal)
+ }
+ if kkvVal, ok := resp.Encryption.GetKekKeyVersionOk(); ok {
+ enc.KekKeyVersion = types.StringValue(kkvVal)
+ }
+ if sa, ok := resp.Encryption.GetServiceAccountOk(); ok {
+ enc.ServiceAccount = types.StringValue(sa)
+ }
+ return enc
+ */
+ return sqlserverflexbetaResGen.NewEncryptionValueNull()
}
func toCreatePayload(
ctx context.Context,
- model *sqlserverflexbetaResGen.UserModel,
+ model *dataSourceModel,
) (*sqlserverflexbeta.CreateUserRequestPayload, error) {
if model == nil {
return nil, fmt.Errorf("nil model")
}
+ var roles []sqlserverflexbeta.UserRole
+ if !model.Roles.IsNull() && !model.Roles.IsUnknown() {
+ diags := model.Roles.ElementsAs(ctx, &roles, false)
+ if diags.HasError() {
+ return nil, fmt.Errorf("failed to convert roles: %v", diags)
+ }
+ }
+
return &sqlserverflexbeta.CreateUserRequestPayload{
- // TODO: fill fields
+ DefaultDatabase: model.DefaultDatabase.ValueStringPointer(),
+ Username: model.Username.ValueStringPointer(),
+ Roles: &roles,
}, nil
}
diff --git a/stackit/internal/services/sqlserverflexbeta/user/planModifiers.yaml b/stackit/internal/services/sqlserverflexbeta/user/planModifiers.yaml
new file mode 100644
index 00000000..fe4025ee
--- /dev/null
+++ b/stackit/internal/services/sqlserverflexbeta/user/planModifiers.yaml
@@ -0,0 +1,51 @@
+fields:
+ - name: 'id'
+ modifiers:
+ - 'UseStateForUnknown'
+
+ - name: 'user_id'
+ modifiers:
+ - 'UseStateForUnknown'
+
+ - name: 'instance_id'
+ validators:
+ - validate.NoSeparator
+ - validate.UUID
+ modifiers:
+ - 'UseStateForUnknown'
+
+ - name: 'project_id'
+ validators:
+ - validate.NoSeparator
+ - validate.UUID
+ modifiers:
+ - 'UseStateForUnknown'
+ - 'RequiresReplace'
+
+ - name: 'region'
+ modifiers:
+ - 'RequiresReplace'
+
+ # NOTE: duplicate 'user_id' entry removed; 'user_id' is already configured
+ # above with 'UseStateForUnknown'. Duplicate field names silently overwrite
+ # earlier entries when the config is folded into a map.
+
+ - name: 'username'
+ modifiers:
+ - 'UseStateForUnknown'
+
+ - name: 'roles'
+ modifiers:
+ - 'UseStateForUnknown'
+
+ - name: 'password'
+ modifiers:
+ - 'UseStateForUnknown'
+
+ - name: 'uri'
+ modifiers:
+ - 'UseStateForUnknown'
+
+ - name: 'status'
+ modifiers:
+ - 'UseStateForUnknown'
diff --git a/stackit/internal/services/sqlserverflexbeta/user/resource.go b/stackit/internal/services/sqlserverflexbeta/user/resource.go
index e2692e13..40f78c7f 100644
--- a/stackit/internal/services/sqlserverflexbeta/user/resource.go
+++ b/stackit/internal/services/sqlserverflexbeta/user/resource.go
@@ -2,7 +2,9 @@ package sqlserverflexbeta
import (
"context"
+ _ "embed"
"fmt"
+ "strconv"
"strings"
"github.com/hashicorp/terraform-plugin-framework/path"
@@ -11,9 +13,8 @@ import (
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-log/tflog"
"github.com/stackitcloud/stackit-sdk-go/core/config"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
-
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
@@ -33,27 +34,52 @@ func NewUserResource() resource.Resource {
return &userResource{}
}
+// resourceModel describes the resource data model.
+type resourceModel = sqlserverflexbetaResGen.UserModel
+
+// UserResourceIdentityModel describes the resource's identity attributes.
+type UserResourceIdentityModel struct {
+ ProjectID types.String `tfsdk:"project_id"`
+ Region types.String `tfsdk:"region"`
+ InstanceID types.String `tfsdk:"instance_id"`
+ UserID types.Int64 `tfsdk:"user_id"`
+}
+
type userResource struct {
client *sqlserverflexbeta.APIClient
providerData core.ProviderData
}
-type UserResourceIdentityModel struct {
- ProjectID types.String `tfsdk:"project_id"`
- Region types.String `tfsdk:"region"`
- UserID types.String `tfsdk:"instance_id"`
- // TODO: implement further needed parts
-}
-
func (r *userResource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
resp.TypeName = req.ProviderTypeName + "_sqlserverflexbeta_user"
}
-func (r *userResource) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) {
- resp.Schema = sqlserverflexbetaResGen.UserResourceSchema(ctx)
+//go:embed planModifiers.yaml
+var modifiersFileByte []byte
+
+func (r *userResource) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) {
+
+ s := sqlserverflexbetaResGen.UserResourceSchema(ctx)
+
+ fields, err := utils.ReadModifiersConfig(modifiersFileByte)
+ if err != nil {
+ resp.Diagnostics.AddError("error during read modifiers config file", err.Error())
+ return
+ }
+
+ err = utils.AddPlanModifiersToResourceSchema(fields, &s)
+ if err != nil {
+ resp.Diagnostics.AddError("error adding plan modifiers", err.Error())
+ return
+ }
+ resp.Schema = s
}
-func (r *instanceResource) IdentitySchema(_ context.Context, _ resource.IdentitySchemaRequest, resp *resource.IdentitySchemaResponse) {
+func (r *userResource) IdentitySchema(
+ _ context.Context,
+ _ resource.IdentitySchemaRequest,
+ resp *resource.IdentitySchemaResponse,
+) {
resp.IdentitySchema = identityschema.Schema{
Attributes: map[string]identityschema.Attribute{
"project_id": identityschema.StringAttribute{
@@ -85,8 +111,11 @@ func (r *userResource) Configure(
config.WithCustomAuth(r.providerData.RoundTripper),
utils.UserAgentConfigOption(r.providerData.Version),
}
- if r.providerData.SqlserverflexbetaCustomEndpoint != "" {
- apiClientConfigOptions = append(apiClientConfigOptions, config.WithEndpoint(r.providerData.sqlserverflexbetaCustomEndpoint))
+ if r.providerData.SQLServerFlexCustomEndpoint != "" {
+ apiClientConfigOptions = append(
+ apiClientConfigOptions,
+ config.WithEndpoint(r.providerData.SQLServerFlexCustomEndpoint),
+ )
} else {
apiClientConfigOptions = append(apiClientConfigOptions, config.WithRegion(r.providerData.GetRegion()))
}
@@ -106,7 +135,7 @@ func (r *userResource) Configure(
}
func (r *userResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) {
- var data sqlserverflexbetaResGen.UserModel
+ var data resourceModel
// Read Terraform plan data into the model
resp.Diagnostics.Append(req.Plan.Get(ctx, &data)...)
@@ -159,14 +188,14 @@ func (r *userResource) Create(ctx context.Context, req resource.CreateRequest, r
*/
// Example data value setting
- data.UserId = types.StringValue("id-from-response")
+ //data.UserId = types.StringValue("id-from-response")
// TODO: Set data returned by API in identity
identity := UserResourceIdentityModel{
ProjectID: types.StringValue(projectId),
Region: types.StringValue(region),
// TODO: add missing values
- UserID: types.StringValue(UserId),
+ // UserID: types.StringValue(UserId),
}
resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
if resp.Diagnostics.HasError() {
@@ -228,7 +257,7 @@ func (r *userResource) Create(ctx context.Context, req resource.CreateRequest, r
}
func (r *userResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) {
- var data sqlserverflexbetaResGen.UserModel
+ var data resourceModel
// Read Terraform prior state data into the model
resp.Diagnostics.Append(req.State.Get(ctx, &data)...)
@@ -270,7 +299,7 @@ func (r *userResource) Read(ctx context.Context, req resource.ReadRequest, resp
}
func (r *userResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) {
- var data sqlserverflexbetaResGen.UserModel
+ var data resourceModel
// Read Terraform prior state data into the model
resp.Diagnostics.Append(req.State.Get(ctx, &data)...)
@@ -301,7 +330,7 @@ func (r *userResource) Update(ctx context.Context, req resource.UpdateRequest, r
}
func (r *userResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) {
- var data sqlserverflexbetaResGen.UserModel
+ var data resourceModel
// Read Terraform prior state data into the model
resp.Diagnostics.Append(req.State.Get(ctx, &data)...)
@@ -335,7 +364,7 @@ func (r *userResource) ModifyPlan(
req resource.ModifyPlanRequest,
resp *resource.ModifyPlanResponse,
) { // nolint:gocritic // function signature required by Terraform
- var configModel sqlserverflexbetaResGen.UserModel
+ var configModel resourceModel
// skip initial empty configuration to avoid follow-up errors
if req.Config.Raw.IsNull() {
return
@@ -345,7 +374,7 @@ func (r *userResource) ModifyPlan(
return
}
- var planModel sqlserverflexbetaResGen.UserModel
+ var planModel resourceModel
resp.Diagnostics.Append(req.Plan.Get(ctx, &planModel)...)
if resp.Diagnostics.HasError() {
return
@@ -382,24 +411,61 @@ func (r *userResource) ImportState(
req resource.ImportStateRequest,
resp *resource.ImportStateResponse,
) {
- idParts := strings.Split(req.ID, core.Separator)
+ ctx = core.InitProviderContext(ctx)
+
+ if req.ID != "" {
+
+ idParts := strings.Split(req.ID, core.Separator)
+
+ if len(idParts) != 4 || idParts[0] == "" || idParts[1] == "" || idParts[2] == "" || idParts[3] == "" {
+ core.LogAndAddError(
+ ctx, &resp.Diagnostics,
+ "Error importing user",
+ fmt.Sprintf(
+ "Expected import identifier with format [project_id],[region],[instance_id],[user_id], got %q",
+ req.ID,
+ ),
+ )
+ return
+ }
+
+ userId, err := strconv.ParseInt(idParts[3], 10, 64)
+ if err != nil {
+ core.LogAndAddError(
+ ctx,
+ &resp.Diagnostics,
+ "Error importing user",
+ fmt.Sprintf("Invalid user_id format: %q. It must be a valid integer.", idParts[3]),
+ )
+ return
+ }
+
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), idParts[0])...)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), idParts[1])...)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), idParts[2])...)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("user_id"), userId)...)
+
+ tflog.Info(ctx, "Sqlserverflexbeta user state imported")
- // Todo: Import logic
- if len(idParts) < 2 || idParts[0] == "" || idParts[1] == "" {
- core.LogAndAddError(
- ctx, &resp.Diagnostics,
- "Error importing database",
- fmt.Sprintf(
- "Expected import identifier with format [project_id],[region],..., got %q",
- req.ID,
- ),
- )
return
}
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), idParts[0])...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), idParts[1])...)
- // ... more ...
+ // If no ID is provided, attempt to read identity attributes from the import configuration
+ var identityData UserResourceIdentityModel
+ resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ projectId := identityData.ProjectID.ValueString()
+ region := identityData.Region.ValueString()
+ instanceId := identityData.InstanceID.ValueString()
+ userId := identityData.UserID.ValueInt64()
+
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), projectId)...)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), region)...)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), instanceId)...)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("user_id"), userId)...)
core.LogAndAddWarning(
ctx,
@@ -408,4 +474,5 @@ func (r *userResource) ImportState(
"The database password is not imported as it is only available upon creation of a new database. The password field will be empty.",
)
tflog.Info(ctx, "Sqlserverflexbeta user state imported")
+
}
diff --git a/stackit/internal/services/sqlserverflexbeta/versions/datasources_gen/version_data_source_gen.go b/stackit/internal/services/sqlserverflexbeta/versions/datasources_gen/version_data_source_gen.go
new file mode 100644
index 00000000..239b44d3
--- /dev/null
+++ b/stackit/internal/services/sqlserverflexbeta/versions/datasources_gen/version_data_source_gen.go
@@ -0,0 +1,569 @@
+// Code generated by terraform-plugin-framework-generator DO NOT EDIT.
+
+package sqlserverflexbeta
+
+import (
+ "context"
+ "fmt"
+ "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ "github.com/hashicorp/terraform-plugin-framework/attr"
+ "github.com/hashicorp/terraform-plugin-framework/diag"
+ "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+ "github.com/hashicorp/terraform-plugin-framework/types/basetypes"
+ "github.com/hashicorp/terraform-plugin-go/tftypes"
+ "strings"
+
+ "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
+)
+
+func VersionDataSourceSchema(ctx context.Context) schema.Schema {
+ return schema.Schema{
+ Attributes: map[string]schema.Attribute{
+ "project_id": schema.StringAttribute{
+ Required: true,
+ Description: "The STACKIT project ID.",
+ MarkdownDescription: "The STACKIT project ID.",
+ },
+ "region": schema.StringAttribute{
+ Required: true,
+ Description: "The region which should be addressed",
+ MarkdownDescription: "The region which should be addressed",
+ Validators: []validator.String{
+ stringvalidator.OneOf(
+ "eu01",
+ ),
+ },
+ },
+ "versions": schema.ListNestedAttribute{
+ NestedObject: schema.NestedAttributeObject{
+ Attributes: map[string]schema.Attribute{
+ "beta": schema.BoolAttribute{
+ Computed: true,
+ Description: "Flag if the version is a beta version. If set the version may contain bugs and is not fully tested.",
+ MarkdownDescription: "Flag if the version is a beta version. If set the version may contain bugs and is not fully tested.",
+ },
+ "deprecated": schema.StringAttribute{
+ Computed: true,
+ Description: "Timestamp in RFC3339 format which says when the version will no longer be supported by STACKIT.",
+ MarkdownDescription: "Timestamp in RFC3339 format which says when the version will no longer be supported by STACKIT.",
+ },
+ "recommend": schema.BoolAttribute{
+ Computed: true,
+ Description: "Flag if the version is recommend by the STACKIT Team.",
+ MarkdownDescription: "Flag if the version is recommend by the STACKIT Team.",
+ },
+ "version": schema.StringAttribute{
+ Computed: true,
+ Description: "The sqlserver version used for the instance.",
+ MarkdownDescription: "The sqlserver version used for the instance.",
+ },
+ },
+ CustomType: VersionsType{
+ ObjectType: types.ObjectType{
+ AttrTypes: VersionsValue{}.AttributeTypes(ctx),
+ },
+ },
+ },
+ Computed: true,
+ Description: "A list containing available sqlserver versions.",
+ MarkdownDescription: "A list containing available sqlserver versions.",
+ },
+ },
+ }
+}
+
+type VersionModel struct {
+ ProjectId types.String `tfsdk:"project_id"`
+ Region types.String `tfsdk:"region"`
+ Versions types.List `tfsdk:"versions"`
+}
+
+var _ basetypes.ObjectTypable = VersionsType{}
+
+type VersionsType struct {
+ basetypes.ObjectType
+}
+
+func (t VersionsType) Equal(o attr.Type) bool {
+ other, ok := o.(VersionsType)
+
+ if !ok {
+ return false
+ }
+
+ return t.ObjectType.Equal(other.ObjectType)
+}
+
+func (t VersionsType) String() string {
+ return "VersionsType"
+}
+
+func (t VersionsType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ attributes := in.Attributes()
+
+ betaAttribute, ok := attributes["beta"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `beta is missing from object`)
+
+ return nil, diags
+ }
+
+ betaVal, ok := betaAttribute.(basetypes.BoolValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`beta expected to be basetypes.BoolValue, was: %T`, betaAttribute))
+ }
+
+ deprecatedAttribute, ok := attributes["deprecated"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `deprecated is missing from object`)
+
+ return nil, diags
+ }
+
+ deprecatedVal, ok := deprecatedAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`deprecated expected to be basetypes.StringValue, was: %T`, deprecatedAttribute))
+ }
+
+ recommendAttribute, ok := attributes["recommend"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `recommend is missing from object`)
+
+ return nil, diags
+ }
+
+ recommendVal, ok := recommendAttribute.(basetypes.BoolValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`recommend expected to be basetypes.BoolValue, was: %T`, recommendAttribute))
+ }
+
+ versionAttribute, ok := attributes["version"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `version is missing from object`)
+
+ return nil, diags
+ }
+
+ versionVal, ok := versionAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`version expected to be basetypes.StringValue, was: %T`, versionAttribute))
+ }
+
+ if diags.HasError() {
+ return nil, diags
+ }
+
+ return VersionsValue{
+ Beta: betaVal,
+ Deprecated: deprecatedVal,
+ Recommend: recommendVal,
+ Version: versionVal,
+ state: attr.ValueStateKnown,
+ }, diags
+}
+
+func NewVersionsValueNull() VersionsValue {
+ return VersionsValue{
+ state: attr.ValueStateNull,
+ }
+}
+
+func NewVersionsValueUnknown() VersionsValue {
+ return VersionsValue{
+ state: attr.ValueStateUnknown,
+ }
+}
+
+func NewVersionsValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (VersionsValue, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521
+ ctx := context.Background()
+
+ for name, attributeType := range attributeTypes {
+ attribute, ok := attributes[name]
+
+ if !ok {
+ diags.AddError(
+ "Missing VersionsValue Attribute Value",
+ "While creating a VersionsValue value, a missing attribute value was detected. "+
+ "A VersionsValue must contain values for all attributes, even if null or unknown. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("VersionsValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()),
+ )
+
+ continue
+ }
+
+ if !attributeType.Equal(attribute.Type(ctx)) {
+ diags.AddError(
+ "Invalid VersionsValue Attribute Type",
+ "While creating a VersionsValue value, an invalid attribute value was detected. "+
+ "A VersionsValue must use a matching attribute type for the value. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("VersionsValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+
+ fmt.Sprintf("VersionsValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)),
+ )
+ }
+ }
+
+ for name := range attributes {
+ _, ok := attributeTypes[name]
+
+ if !ok {
+ diags.AddError(
+ "Extra VersionsValue Attribute Value",
+ "While creating a VersionsValue value, an extra attribute value was detected. "+
+ "A VersionsValue must not contain values beyond the expected attribute types. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("Extra VersionsValue Attribute Name: %s", name),
+ )
+ }
+ }
+
+ if diags.HasError() {
+ return NewVersionsValueUnknown(), diags
+ }
+
+ betaAttribute, ok := attributes["beta"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `beta is missing from object`)
+
+ return NewVersionsValueUnknown(), diags
+ }
+
+ betaVal, ok := betaAttribute.(basetypes.BoolValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`beta expected to be basetypes.BoolValue, was: %T`, betaAttribute))
+ }
+
+ deprecatedAttribute, ok := attributes["deprecated"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `deprecated is missing from object`)
+
+ return NewVersionsValueUnknown(), diags
+ }
+
+ deprecatedVal, ok := deprecatedAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`deprecated expected to be basetypes.StringValue, was: %T`, deprecatedAttribute))
+ }
+
+ recommendAttribute, ok := attributes["recommend"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `recommend is missing from object`)
+
+ return NewVersionsValueUnknown(), diags
+ }
+
+ recommendVal, ok := recommendAttribute.(basetypes.BoolValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`recommend expected to be basetypes.BoolValue, was: %T`, recommendAttribute))
+ }
+
+ versionAttribute, ok := attributes["version"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `version is missing from object`)
+
+ return NewVersionsValueUnknown(), diags
+ }
+
+ versionVal, ok := versionAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`version expected to be basetypes.StringValue, was: %T`, versionAttribute))
+ }
+
+ if diags.HasError() {
+ return NewVersionsValueUnknown(), diags
+ }
+
+ return VersionsValue{
+ Beta: betaVal,
+ Deprecated: deprecatedVal,
+ Recommend: recommendVal,
+ Version: versionVal,
+ state: attr.ValueStateKnown,
+ }, diags
+}
+
+func NewVersionsValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) VersionsValue {
+ object, diags := NewVersionsValue(attributeTypes, attributes)
+
+ if diags.HasError() {
+ // This could potentially be added to the diag package.
+ diagsStrings := make([]string, 0, len(diags))
+
+ for _, diagnostic := range diags {
+ diagsStrings = append(diagsStrings, fmt.Sprintf(
+ "%s | %s | %s",
+ diagnostic.Severity(),
+ diagnostic.Summary(),
+ diagnostic.Detail()))
+ }
+
+ panic("NewVersionsValueMust received error(s): " + strings.Join(diagsStrings, "\n"))
+ }
+
+ return object
+}
+
+func (t VersionsType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) {
+ if in.Type() == nil {
+ return NewVersionsValueNull(), nil
+ }
+
+ if !in.Type().Equal(t.TerraformType(ctx)) {
+ return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type())
+ }
+
+ if !in.IsKnown() {
+ return NewVersionsValueUnknown(), nil
+ }
+
+ if in.IsNull() {
+ return NewVersionsValueNull(), nil
+ }
+
+ attributes := map[string]attr.Value{}
+
+ val := map[string]tftypes.Value{}
+
+ err := in.As(&val)
+
+ if err != nil {
+ return nil, err
+ }
+
+ for k, v := range val {
+ a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v)
+
+ if err != nil {
+ return nil, err
+ }
+
+ attributes[k] = a
+ }
+
+ return NewVersionsValueMust(VersionsValue{}.AttributeTypes(ctx), attributes), nil
+}
+
+func (t VersionsType) ValueType(ctx context.Context) attr.Value {
+ return VersionsValue{}
+}
+
+var _ basetypes.ObjectValuable = VersionsValue{}
+
+type VersionsValue struct {
+ Beta basetypes.BoolValue `tfsdk:"beta"`
+ Deprecated basetypes.StringValue `tfsdk:"deprecated"`
+ Recommend basetypes.BoolValue `tfsdk:"recommend"`
+ Version basetypes.StringValue `tfsdk:"version"`
+ state attr.ValueState
+}
+
+func (v VersionsValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
+ attrTypes := make(map[string]tftypes.Type, 4)
+
+ var val tftypes.Value
+ var err error
+
+ attrTypes["beta"] = basetypes.BoolType{}.TerraformType(ctx)
+ attrTypes["deprecated"] = basetypes.StringType{}.TerraformType(ctx)
+ attrTypes["recommend"] = basetypes.BoolType{}.TerraformType(ctx)
+ attrTypes["version"] = basetypes.StringType{}.TerraformType(ctx)
+
+ objectType := tftypes.Object{AttributeTypes: attrTypes}
+
+ switch v.state {
+ case attr.ValueStateKnown:
+ vals := make(map[string]tftypes.Value, 4)
+
+ val, err = v.Beta.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["beta"] = val
+
+ val, err = v.Deprecated.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["deprecated"] = val
+
+ val, err = v.Recommend.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["recommend"] = val
+
+ val, err = v.Version.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["version"] = val
+
+ if err := tftypes.ValidateValue(objectType, vals); err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ return tftypes.NewValue(objectType, vals), nil
+ case attr.ValueStateNull:
+ return tftypes.NewValue(objectType, nil), nil
+ case attr.ValueStateUnknown:
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), nil
+ default:
+ panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state))
+ }
+}
+
+func (v VersionsValue) IsNull() bool {
+ return v.state == attr.ValueStateNull
+}
+
+func (v VersionsValue) IsUnknown() bool {
+ return v.state == attr.ValueStateUnknown
+}
+
+func (v VersionsValue) String() string {
+ return "VersionsValue"
+}
+
+func (v VersionsValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ attributeTypes := map[string]attr.Type{
+ "beta": basetypes.BoolType{},
+ "deprecated": basetypes.StringType{},
+ "recommend": basetypes.BoolType{},
+ "version": basetypes.StringType{},
+ }
+
+ if v.IsNull() {
+ return types.ObjectNull(attributeTypes), diags
+ }
+
+ if v.IsUnknown() {
+ return types.ObjectUnknown(attributeTypes), diags
+ }
+
+ objVal, diags := types.ObjectValue(
+ attributeTypes,
+ map[string]attr.Value{
+ "beta": v.Beta,
+ "deprecated": v.Deprecated,
+ "recommend": v.Recommend,
+ "version": v.Version,
+ })
+
+ return objVal, diags
+}
+
+func (v VersionsValue) Equal(o attr.Value) bool {
+ other, ok := o.(VersionsValue)
+
+ if !ok {
+ return false
+ }
+
+ if v.state != other.state {
+ return false
+ }
+
+ if v.state != attr.ValueStateKnown {
+ return true
+ }
+
+ if !v.Beta.Equal(other.Beta) {
+ return false
+ }
+
+ if !v.Deprecated.Equal(other.Deprecated) {
+ return false
+ }
+
+ if !v.Recommend.Equal(other.Recommend) {
+ return false
+ }
+
+ if !v.Version.Equal(other.Version) {
+ return false
+ }
+
+ return true
+}
+
+func (v VersionsValue) Type(ctx context.Context) attr.Type {
+ return VersionsType{
+ basetypes.ObjectType{
+ AttrTypes: v.AttributeTypes(ctx),
+ },
+ }
+}
+
+func (v VersionsValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
+ return map[string]attr.Type{
+ "beta": basetypes.BoolType{},
+ "deprecated": basetypes.StringType{},
+ "recommend": basetypes.BoolType{},
+ "version": basetypes.StringType{},
+ }
+}
diff --git a/stackit/internal/services/postgresflexalpha/utils/planModifiers.go b/stackit/internal/utils/planModifiers.go
similarity index 100%
rename from stackit/internal/services/postgresflexalpha/utils/planModifiers.go
rename to stackit/internal/utils/planModifiers.go
diff --git a/stackit/internal/utils/planModifiers_test.go b/stackit/internal/utils/planModifiers_test.go
new file mode 100644
index 00000000..337ea36f
--- /dev/null
+++ b/stackit/internal/utils/planModifiers_test.go
@@ -0,0 +1,224 @@
+package utils
+
+import (
+ "testing"
+
+ "github.com/google/go-cmp/cmp"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+ "github.com/stackitcloud/stackit-sdk-go/core/utils"
+)
+
+func TestReadModifiersConfig(t *testing.T) {
+ testcases := []struct {
+ name string
+ content []byte
+ wantErr bool
+ }{
+ {
+ name: "valid yaml",
+ content: []byte(`
+fields:
+ - name: 'id'
+ modifiers:
+ - 'UseStateForUnknown'
+`),
+ wantErr: false,
+ },
+ {
+ name: "invalid yaml",
+ content: []byte(`invalid: yaml: :`),
+ wantErr: true,
+ },
+ }
+
+ for _, tc := range testcases {
+ t.Run(
+ tc.name, func(t *testing.T) {
+ _, err := ReadModifiersConfig(tc.content)
+ if (err != nil) != tc.wantErr {
+ t.Errorf("ReadModifiersConfig() error = %v, wantErr %v", err, tc.wantErr)
+ }
+ },
+ )
+ }
+}
+
+func TestAddPlanModifiersToResourceSchema(t *testing.T) {
+ testcases := []struct {
+ name string
+ fields *Fields
+ sch *schema.Schema
+ wantErr bool
+ }{
+ {
+ name: "full coverage - all types and nested structures",
+ fields: &Fields{
+ Fields: []*Field{
+ {
+ Name: "string_attr",
+ Modifiers: []*string{utils.Ptr("RequiresReplace"), utils.Ptr("UseStateForUnknown")},
+ },
+ {Name: "bool_attr", Modifiers: []*string{utils.Ptr("RequiresReplace")}},
+ {Name: "int_attr", Modifiers: []*string{utils.Ptr("UseStateForUnknown")}},
+ {Name: "list_attr", Modifiers: []*string{utils.Ptr("RequiresReplace")}},
+ {Name: "Nested.sub_string", Modifiers: []*string{utils.Ptr("RequiresReplace")}},
+ },
+ },
+ sch: &schema.Schema{
+ Attributes: map[string]schema.Attribute{
+ "StringAttr": schema.StringAttribute{},
+ "BoolAttr": schema.BoolAttribute{},
+ "IntAttr": schema.Int64Attribute{},
+ "ListAttr": schema.ListAttribute{},
+ "Nested": schema.SingleNestedAttribute{
+ Attributes: map[string]schema.Attribute{
+ "SubString": schema.StringAttribute{},
+ },
+ },
+ "Unsupported": schema.MapAttribute{ElementType: types.StringType}, // Triggers default/warn case
+ },
+ },
+ wantErr: false,
+ },
+ {
+ name: "validation error - invalid modifier",
+ fields: &Fields{
+ Fields: []*Field{
+ {Name: "id", Modifiers: []*string{utils.Ptr("InvalidModifier")}},
+ },
+ },
+ sch: &schema.Schema{
+ Attributes: map[string]schema.Attribute{"id": schema.StringAttribute{}},
+ },
+ wantErr: true,
+ },
+ {
+ name: "validation error - empty modifier",
+ fields: &Fields{
+ Fields: []*Field{
+ {Name: "id", Modifiers: []*string{utils.Ptr("")}},
+ },
+ },
+ sch: &schema.Schema{},
+ wantErr: true,
+ },
+ {
+ name: "nil fields - should return nil",
+ fields: nil,
+ sch: &schema.Schema{},
+ wantErr: false,
+ },
+ }
+
+ for _, tc := range testcases {
+ t.Run(
+ tc.name, func(t *testing.T) {
+ err := AddPlanModifiersToResourceSchema(tc.fields, tc.sch)
+
+ if (err != nil) != tc.wantErr {
+ t.Fatalf("AddPlanModifiersToResourceSchema() error = %v, wantErr %v", err, tc.wantErr)
+ }
+
+ if !tc.wantErr && tc.name == "full coverage - all types and nested structures" {
+ // Check StringAttr
+ if sAttr, ok := tc.sch.Attributes["StringAttr"].(schema.StringAttribute); ok {
+ if len(sAttr.PlanModifiers) != 2 {
+ t.Errorf("StringAttr: expected 2 modifiers, got %d", len(sAttr.PlanModifiers))
+ }
+ }
+
+ // Check Nested Sub-Attribute
+ if nested, ok := tc.sch.Attributes["Nested"].(schema.SingleNestedAttribute); ok {
+ if subAttr, ok := nested.Attributes["SubString"].(schema.StringAttribute); ok {
+ if len(subAttr.PlanModifiers) != 1 {
+ // This previously failed because the prefix was "Nested" instead of "nested"
+ t.Errorf("Nested SubString: expected 1 modifier, got %d", len(subAttr.PlanModifiers))
+ }
+ } else {
+ t.Error("SubString attribute not found in Nested")
+ }
+ } else {
+ t.Error("Nested attribute not found")
+ }
+ }
+ },
+ )
+ }
+}
+
+func TestFieldListToMap(t *testing.T) {
+ testcases := []struct {
+ name string
+ fields *Fields
+ want map[string][]*string
+ }{
+ {
+ name: "convert list to map",
+ fields: &Fields{
+ Fields: []*Field{
+ {Name: "test", Modifiers: []*string{utils.Ptr("mod")}},
+ },
+ },
+ want: map[string][]*string{
+ "test": {utils.Ptr("mod")},
+ },
+ },
+ {
+ name: "nil fields",
+ fields: nil,
+ want: map[string][]*string{},
+ },
+ }
+
+ for _, tc := range testcases {
+ t.Run(
+ tc.name, func(t *testing.T) {
+ got := fieldListToMap(tc.fields)
+ if diff := cmp.Diff(tc.want, got); diff != "" {
+ t.Errorf("fieldListToMap() mismatch (-want +got):\n%s", diff)
+ }
+ },
+ )
+ }
+}
+
+func TestHandleTypeMismatches(t *testing.T) {
+ modifiers := []*string{utils.Ptr("RequiresReplace")}
+
+ t.Run(
+ "bool type mismatch", func(t *testing.T) {
+ _, err := handleBoolPlanModifiers(schema.StringAttribute{}, modifiers)
+ if err == nil {
+ t.Error("expected error for type mismatch in handleBoolPlanModifiers")
+ }
+ },
+ )
+
+ t.Run(
+ "string type mismatch", func(t *testing.T) {
+ _, err := handleStringPlanModifiers(schema.BoolAttribute{}, modifiers)
+ if err == nil {
+ t.Error("expected error for type mismatch in handleStringPlanModifiers")
+ }
+ },
+ )
+
+ t.Run(
+ "int64 type mismatch", func(t *testing.T) {
+ _, err := handleInt64PlanModifiers(schema.StringAttribute{}, modifiers)
+ if err == nil {
+ t.Error("expected error for type mismatch in handleInt64PlanModifiers")
+ }
+ },
+ )
+
+ t.Run(
+ "list type mismatch", func(t *testing.T) {
+ _, err := handleListPlanModifiers(schema.StringAttribute{}, modifiers)
+ if err == nil {
+ t.Error("expected error for type mismatch in handleListPlanModifiers")
+ }
+ },
+ )
+}
diff --git a/stackit/internal/wait/sqlserverflexbeta/wait_test.go b/stackit/internal/wait/sqlserverflexbeta/wait_test.go
index e1ccc9c5..35b66cf6 100644
--- a/stackit/internal/wait/sqlserverflexbeta/wait_test.go
+++ b/stackit/internal/wait/sqlserverflexbeta/wait_test.go
@@ -20,7 +20,30 @@ type apiClientInstanceMocked struct {
instanceGetFails bool
}
-func (a *apiClientInstanceMocked) GetInstanceRequestExecute(_ context.Context, _, _, _ string) (*sqlserverflex.GetInstanceResponse, error) {
+func (a *apiClientInstanceMocked) GetDatabaseRequestExecute(
+ _ context.Context,
+ projectId string,
+ region string,
+ instanceId string,
+ databaseName string,
+) (*sqlserverflex.GetDatabaseResponse, error) {
+ return nil, nil
+}
+
+func (a *apiClientInstanceMocked) GetUserRequestExecute(
+ ctx context.Context,
+ projectId string,
+ region string,
+ instanceId string,
+ userId int64,
+) (*sqlserverflex.GetUserResponse, error) {
+ return nil, nil
+}
+
+func (a *apiClientInstanceMocked) GetInstanceRequestExecute(
+ _ context.Context,
+ _, _, _ string,
+) (*sqlserverflex.GetInstanceResponse, error) {
if a.instanceGetFails {
return nil, &oapierror.GenericOpenAPIError{
StatusCode: 500,
@@ -111,26 +134,28 @@ func TestCreateInstanceWaitHandler(t *testing.T) {
},
}
for _, tt := range tests {
- t.Run(tt.desc, func(t *testing.T) {
- instanceId := "foo-bar"
+ t.Run(
+ tt.desc, func(t *testing.T) {
+ instanceId := "foo-bar"
- apiClient := &apiClientInstanceMocked{
- instanceId: instanceId,
- instanceState: tt.instanceState,
- instanceGetFails: tt.instanceGetFails,
- }
+ apiClient := &apiClientInstanceMocked{
+ instanceId: instanceId,
+ instanceState: tt.instanceState,
+ instanceGetFails: tt.instanceGetFails,
+ }
- handler := CreateInstanceWaitHandler(context.Background(), apiClient, "", instanceId, "")
+ handler := CreateInstanceWaitHandler(context.Background(), apiClient, "", instanceId, "")
- gotRes, err := handler.SetTimeout(10 * time.Millisecond).SetSleepBeforeWait(1 * time.Millisecond).WaitWithContext(context.Background())
- if (err != nil) != tt.wantErr {
- t.Fatalf("handler error = %v, wantErr %v", err, tt.wantErr)
- }
+ gotRes, err := handler.SetTimeout(10 * time.Millisecond).SetSleepBeforeWait(1 * time.Millisecond).WaitWithContext(context.Background())
+ if (err != nil) != tt.wantErr {
+ t.Fatalf("handler error = %v, wantErr %v", err, tt.wantErr)
+ }
- if !cmp.Equal(gotRes, tt.wantRes) {
- t.Fatalf("handler gotRes = %v, want %v", gotRes, tt.wantRes)
- }
- })
+ if !cmp.Equal(gotRes, tt.wantRes) {
+ t.Fatalf("handler gotRes = %v, want %v", gotRes, tt.wantRes)
+ }
+ },
+ )
}
}
@@ -179,34 +204,36 @@ func TestUpdateInstanceWaitHandler(t *testing.T) {
},
}
for _, tt := range tests {
- t.Run(tt.desc, func(t *testing.T) {
- instanceId := "foo-bar"
+ t.Run(
+ tt.desc, func(t *testing.T) {
+ instanceId := "foo-bar"
- apiClient := &apiClientInstanceMocked{
- instanceId: instanceId,
- instanceState: tt.instanceState,
- instanceGetFails: tt.instanceGetFails,
- }
-
- var wantRes *sqlserverflex.GetInstanceResponse
- if tt.wantResp {
- wantRes = &sqlserverflex.GetInstanceResponse{
- Id: &instanceId,
- Status: sqlserverflex.GetInstanceResponseGetStatusAttributeType(utils.Ptr(tt.instanceState)),
+ apiClient := &apiClientInstanceMocked{
+ instanceId: instanceId,
+ instanceState: tt.instanceState,
+ instanceGetFails: tt.instanceGetFails,
}
- }
- handler := UpdateInstanceWaitHandler(context.Background(), apiClient, "", instanceId, "")
+ var wantRes *sqlserverflex.GetInstanceResponse
+ if tt.wantResp {
+ wantRes = &sqlserverflex.GetInstanceResponse{
+ Id: &instanceId,
+ Status: sqlserverflex.GetInstanceResponseGetStatusAttributeType(utils.Ptr(tt.instanceState)),
+ }
+ }
- gotRes, err := handler.SetTimeout(10 * time.Millisecond).SetSleepBeforeWait(1 * time.Millisecond).WaitWithContext(context.Background())
+ handler := UpdateInstanceWaitHandler(context.Background(), apiClient, "", instanceId, "")
- if (err != nil) != tt.wantErr {
- t.Fatalf("handler error = %v, wantErr %v", err, tt.wantErr)
- }
- if !cmp.Equal(gotRes, wantRes) {
- t.Fatalf("handler gotRes = %v, want %v", gotRes, wantRes)
- }
- })
+ gotRes, err := handler.SetTimeout(10 * time.Millisecond).SetSleepBeforeWait(1 * time.Millisecond).WaitWithContext(context.Background())
+
+ if (err != nil) != tt.wantErr {
+ t.Fatalf("handler error = %v, wantErr %v", err, tt.wantErr)
+ }
+ if !cmp.Equal(gotRes, wantRes) {
+ t.Fatalf("handler gotRes = %v, want %v", gotRes, wantRes)
+ }
+ },
+ )
}
}
@@ -236,23 +263,25 @@ func TestDeleteInstanceWaitHandler(t *testing.T) {
},
}
for _, tt := range tests {
- t.Run(tt.desc, func(t *testing.T) {
- instanceId := "foo-bar"
+ t.Run(
+ tt.desc, func(t *testing.T) {
+ instanceId := "foo-bar"
- apiClient := &apiClientInstanceMocked{
- instanceGetFails: tt.instanceGetFails,
- instanceIsDeleted: tt.instanceState == InstanceStateSuccess,
- instanceId: instanceId,
- instanceState: tt.instanceState,
- }
+ apiClient := &apiClientInstanceMocked{
+ instanceGetFails: tt.instanceGetFails,
+ instanceIsDeleted: tt.instanceState == InstanceStateSuccess,
+ instanceId: instanceId,
+ instanceState: tt.instanceState,
+ }
- handler := DeleteInstanceWaitHandler(context.Background(), apiClient, "", instanceId, "")
+ handler := DeleteInstanceWaitHandler(context.Background(), apiClient, "", instanceId, "")
- _, err := handler.SetTimeout(10 * time.Millisecond).WaitWithContext(context.Background())
+ _, err := handler.SetTimeout(10 * time.Millisecond).WaitWithContext(context.Background())
- if (err != nil) != tt.wantErr {
- t.Fatalf("handler error = %v, wantErr %v", err, tt.wantErr)
- }
- })
+ if (err != nil) != tt.wantErr {
+ t.Fatalf("handler error = %v, wantErr %v", err, tt.wantErr)
+ }
+ },
+ )
}
}