diff --git a/docs/data-sources/postgresflexalpha_database.md b/docs/data-sources/postgresflexalpha_database.md
new file mode 100644
index 00000000..95c115e3
--- /dev/null
+++ b/docs/data-sources/postgresflexalpha_database.md
@@ -0,0 +1,38 @@
+---
+# generated by https://github.com/hashicorp/terraform-plugin-docs
+page_title: "stackitprivatepreview_postgresflexalpha_database Data Source - stackitprivatepreview"
+subcategory: ""
+description: |-
+
+---
+
+# stackitprivatepreview_postgresflexalpha_database (Data Source)
+
+
+
+## Example Usage
+
+```terraform
+data "stackitprivatepreview_postgresflexalpha_database" "example" {
+ project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
+ instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
+ database_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
+}
+```
+
+
+## Schema
+
+### Required
+
+- `database_id` (Number) The ID of the database.
+- `instance_id` (String) The ID of the instance.
+- `project_id` (String) The STACKIT project ID.
+- `region` (String) The region which should be addressed
+
+### Read-Only
+
+- `id` (String) Terraform's internal resource ID. It is structured as "`project_id`,`region`,`instance_id`,`database_id`".
+- `name` (String) The name of the database.
+- `owner` (String) The owner of the database.
+- `tf_original_api_id` (Number) The id of the database.
diff --git a/docs/data-sources/postgresflexalpha_flavor.md b/docs/data-sources/postgresflexalpha_flavor.md
new file mode 100644
index 00000000..24c79829
--- /dev/null
+++ b/docs/data-sources/postgresflexalpha_flavor.md
@@ -0,0 +1,54 @@
+---
+# generated by https://github.com/hashicorp/terraform-plugin-docs
+page_title: "stackitprivatepreview_postgresflexalpha_flavor Data Source - stackitprivatepreview"
+subcategory: ""
+description: |-
+
+---
+
+# stackitprivatepreview_postgresflexalpha_flavor (Data Source)
+
+
+
+## Example Usage
+
+```terraform
+data "stackitprivatepreview_postgresflexalpha_flavor" "flavor" {
+ project_id = var.project_id
+ region = var.region
+ cpu = 4
+ ram = 16
+ node_type = "Single"
+ storage_class = "premium-perf2-stackit"
+}
+```
+
+
+## Schema
+
+### Required
+
+- `cpu` (Number) The cpu count of the instance.
+- `node_type` (String) defines the nodeType it can be either single or replica
+- `project_id` (String) The STACKIT project ID.
+- `ram` (Number) The memory of the instance in Gibibyte.
+- `region` (String) The region which should be addressed
+- `storage_class` (String) The storage class of the instance.
+
+### Read-Only
+
+- `description` (String) The flavor description.
+- `flavor_id` (String) The flavor id of the instance flavor.
+- `id` (String) The terraform id of the instance flavor.
+- `max_gb` (Number) maximum storage which can be ordered for the flavor in Gigabyte.
+- `min_gb` (Number) minimum storage which is required to order in Gigabyte.
+- `storage_classes` (Attributes List) (see [below for nested schema](#nestedatt--storage_classes))
+
+
+### Nested Schema for `storage_classes`
+
+Read-Only:
+
+- `class` (String)
+- `max_io_per_sec` (Number)
+- `max_through_in_mb` (Number)
diff --git a/docs/data-sources/postgresflexalpha_flavors.md b/docs/data-sources/postgresflexalpha_flavors.md
new file mode 100644
index 00000000..06645bb4
--- /dev/null
+++ b/docs/data-sources/postgresflexalpha_flavors.md
@@ -0,0 +1,68 @@
+---
+# generated by https://github.com/hashicorp/terraform-plugin-docs
+page_title: "stackitprivatepreview_postgresflexalpha_flavors Data Source - stackitprivatepreview"
+subcategory: ""
+description: |-
+
+---
+
+# stackitprivatepreview_postgresflexalpha_flavors (Data Source)
+
+
+
+
+
+
+## Schema
+
+### Required
+
+- `project_id` (String) The STACKIT project ID.
+- `region` (String) The region which should be addressed
+
+### Optional
+
+- `page` (Number) Number of the page of items list to be returned.
+- `size` (Number) Number of items to be returned on each page.
+- `sort` (String) Sorting of the flavors to be returned on each page.
+
+### Read-Only
+
+- `flavors` (Attributes List) List of flavors available for the project. (see [below for nested schema](#nestedatt--flavors))
+- `pagination` (Attributes) (see [below for nested schema](#nestedatt--pagination))
+
+
+### Nested Schema for `flavors`
+
+Read-Only:
+
+- `cpu` (Number) The cpu count of the instance.
+- `description` (String) The flavor description.
+- `max_gb` (Number) maximum storage which can be ordered for the flavor in Gigabyte.
+- `memory` (Number) The memory of the instance in Gibibyte.
+- `min_gb` (Number) minimum storage which is required to order in Gigabyte.
+- `node_type` (String) defines the nodeType it can be either single or replica
+- `storage_classes` (Attributes List) List of storage classes available for the flavor. (see [below for nested schema](#nestedatt--flavors--storage_classes))
+- `tf_original_api_id` (String) The id of the instance flavor.
+
+
+### Nested Schema for `flavors.storage_classes`
+
+Read-Only:
+
+- `class` (String)
+- `max_io_per_sec` (Number)
+- `max_through_in_mb` (Number)
+
+
+
+
+### Nested Schema for `pagination`
+
+Read-Only:
+
+- `page` (Number)
+- `size` (Number)
+- `sort` (String)
+- `total_pages` (Number)
+- `total_rows` (Number)
diff --git a/docs/data-sources/postgresflexalpha_instance.md b/docs/data-sources/postgresflexalpha_instance.md
new file mode 100644
index 00000000..d21a5f10
--- /dev/null
+++ b/docs/data-sources/postgresflexalpha_instance.md
@@ -0,0 +1,95 @@
+---
+# generated by https://github.com/hashicorp/terraform-plugin-docs
+page_title: "stackitprivatepreview_postgresflexalpha_instance Data Source - stackitprivatepreview"
+subcategory: ""
+description: |-
+
+---
+
+# stackitprivatepreview_postgresflexalpha_instance (Data Source)
+
+
+
+## Example Usage
+
+```terraform
+data "stackitprivatepreview_postgresflexalpha_instance" "example" {
+ project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
+ instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
+}
+```
+
+
+## Schema
+
+### Required
+
+- `instance_id` (String) The ID of the instance.
+- `project_id` (String) The STACKIT project ID.
+- `region` (String) The region which should be addressed
+
+### Read-Only
+
+- `acl` (List of String) List of IPV4 cidr.
+- `backup_schedule` (String) The schedule for when the database backup will be created. Currently, ONLY daily schedules are supported (every 24 hours). The schedule is written as a cron schedule.
+- `connection_info` (Attributes) The connection information of the instance (see [below for nested schema](#nestedatt--connection_info))
+- `encryption` (Attributes) The configuration for instance's volume and backup storage encryption.
+
+⚠️ **Note:** This feature is in private preview. Supplying this object is only permitted for enabled accounts. If your account does not have access, the request will be rejected. (see [below for nested schema](#nestedatt--encryption))
+- `flavor_id` (String) The id of the instance flavor.
+- `is_deletable` (Boolean) Whether the instance can be deleted or not.
+- `name` (String) The name of the instance.
+- `network` (Attributes) The access configuration of the instance (see [below for nested schema](#nestedatt--network))
+- `replicas` (Number) How many replicas the instance should have.
+- `retention_days` (Number) How long backups are retained. The value can only be between 32 and 365 days.
+- `status` (String) The current status of the instance.
+- `storage` (Attributes) The object containing information about the storage size and class. (see [below for nested schema](#nestedatt--storage))
+- `tf_original_api_id` (String) The ID of the instance.
+- `version` (String) The Postgres version used for the instance. See [Versions Endpoint](/documentation/postgres-flex-service/version/v3alpha1#tag/Version) for supported version parameters.
+
+
+### Nested Schema for `connection_info`
+
+Read-Only:
+
+- `write` (Attributes) The DNS name and port in the instance overview (see [below for nested schema](#nestedatt--connection_info--write))
+
+
+### Nested Schema for `connection_info.write`
+
+Read-Only:
+
+- `host` (String) The host of the instance.
+- `port` (Number) The port of the instance.
+
+
+
+
+### Nested Schema for `encryption`
+
+Read-Only:
+
+- `kek_key_id` (String) The encryption-key key identifier
+- `kek_key_ring_id` (String) The encryption-key keyring identifier
+- `kek_key_version` (String) The encryption-key version
+- `service_account` (String)
+
+
+
+### Nested Schema for `network`
+
+Read-Only:
+
+- `access_scope` (String) The access scope of the instance. It defines if the instance is public or airgapped.
+- `acl` (List of String) List of IPV4 cidr.
+- `instance_address` (String)
+- `router_address` (String)
+
+
+
+### Nested Schema for `storage`
+
+Read-Only:
+
+- `performance_class` (String) The storage class for the storage.
+- `size` (Number) The storage size in Gigabytes.
diff --git a/docs/data-sources/postgresflexalpha_user.md b/docs/data-sources/postgresflexalpha_user.md
new file mode 100644
index 00000000..c3553c7b
--- /dev/null
+++ b/docs/data-sources/postgresflexalpha_user.md
@@ -0,0 +1,42 @@
+---
+# generated by https://github.com/hashicorp/terraform-plugin-docs
+page_title: "stackitprivatepreview_postgresflexalpha_user Data Source - stackitprivatepreview"
+subcategory: ""
+description: |-
+
+---
+
+# stackitprivatepreview_postgresflexalpha_user (Data Source)
+
+
+
+## Example Usage
+
+```terraform
+data "stackitprivatepreview_postgresflexalpha_user" "example" {
+ project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
+ instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
+ user_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
+}
+```
+
+
+## Schema
+
+### Required
+
+- `instance_id` (String) The ID of the instance.
+- `project_id` (String) The STACKIT project ID.
+- `region` (String) The region which should be addressed
+- `user_id` (Number) The ID of the user.
+
+### Optional
+
+- `id` (String) Terraform's internal resource ID. It is structured as "`project_id`,`region`,`instance_id`,`user_id`".
+
+### Read-Only
+
+- `name` (String) The name of the user.
+- `roles` (List of String) A list of user roles.
+- `status` (String) The current status of the user.
+- `tf_original_api_id` (Number) The ID of the user.
diff --git a/docs/data-sources/sqlserverflexalpha_database.md b/docs/data-sources/sqlserverflexalpha_database.md
new file mode 100644
index 00000000..df66ffb7
--- /dev/null
+++ b/docs/data-sources/sqlserverflexalpha_database.md
@@ -0,0 +1,32 @@
+---
+# generated by https://github.com/hashicorp/terraform-plugin-docs
+page_title: "stackitprivatepreview_sqlserverflexalpha_database Data Source - stackitprivatepreview"
+subcategory: ""
+description: |-
+
+---
+
+# stackitprivatepreview_sqlserverflexalpha_database (Data Source)
+
+
+
+
+
+
+## Schema
+
+### Required
+
+- `database_name` (String) The name of the database.
+- `instance_id` (String) The ID of the instance.
+- `project_id` (String) The STACKIT project ID.
+- `region` (String) The region which should be addressed
+
+### Read-Only
+
+- `collation_name` (String) The collation of the database. This database collation should match the *collation_name* of one of the collations given by the **Get database collation list** endpoint.
+- `compatibility_level` (Number) CompatibilityLevel of the Database.
+- `id` (String) The terraform internal identifier.
+- `name` (String) The name of the database.
+- `owner` (String) The owner of the database.
+- `tf_original_api_id` (Number) The id of the database.
diff --git a/docs/data-sources/sqlserverflexalpha_flavor.md b/docs/data-sources/sqlserverflexalpha_flavor.md
new file mode 100644
index 00000000..7a03ecfb
--- /dev/null
+++ b/docs/data-sources/sqlserverflexalpha_flavor.md
@@ -0,0 +1,54 @@
+---
+# generated by https://github.com/hashicorp/terraform-plugin-docs
+page_title: "stackitprivatepreview_sqlserverflexalpha_flavor Data Source - stackitprivatepreview"
+subcategory: ""
+description: |-
+
+---
+
+# stackitprivatepreview_sqlserverflexalpha_flavor (Data Source)
+
+
+
+## Example Usage
+
+```terraform
+data "stackitprivatepreview_sqlserverflexalpha_flavor" "flavor" {
+ project_id = var.project_id
+ region = var.region
+ cpu = 4
+ ram = 16
+ node_type = "Single"
+ storage_class = "premium-perf2-stackit"
+}
+```
+
+
+## Schema
+
+### Required
+
+- `cpu` (Number) The cpu count of the instance.
+- `node_type` (String) defines the nodeType it can be either single or HA
+- `project_id` (String) The project ID of the flavor.
+- `ram` (Number) The memory of the instance in Gibibyte.
+- `region` (String) The region of the flavor.
+- `storage_class` (String) The storage class of the instance.
+
+### Read-Only
+
+- `description` (String) The flavor description.
+- `flavor_id` (String) The id of the instance flavor.
+- `id` (String) The id of the instance flavor.
+- `max_gb` (Number) maximum storage which can be ordered for the flavor in Gigabyte.
+- `min_gb` (Number) minimum storage which is required to order in Gigabyte.
+- `storage_classes` (Attributes List) List of storage classes available for the flavor. (see [below for nested schema](#nestedatt--storage_classes))
+
+
+### Nested Schema for `storage_classes`
+
+Read-Only:
+
+- `class` (String)
+- `max_io_per_sec` (Number)
+- `max_through_in_mb` (Number)
diff --git a/docs/data-sources/sqlserverflexalpha_instance.md b/docs/data-sources/sqlserverflexalpha_instance.md
new file mode 100644
index 00000000..b05d7b8e
--- /dev/null
+++ b/docs/data-sources/sqlserverflexalpha_instance.md
@@ -0,0 +1,77 @@
+---
+# generated by https://github.com/hashicorp/terraform-plugin-docs
+page_title: "stackitprivatepreview_sqlserverflexalpha_instance Data Source - stackitprivatepreview"
+subcategory: ""
+description: |-
+
+---
+
+# stackitprivatepreview_sqlserverflexalpha_instance (Data Source)
+
+
+
+## Example Usage
+
+```terraform
+data "stackitprivatepreview_sqlserverflexalpha_instance" "example" {
+ project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
+ instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
+}
+```
+
+
+## Schema
+
+### Required
+
+- `instance_id` (String) The ID of the instance.
+- `project_id` (String) The STACKIT project ID.
+- `region` (String) The region which should be addressed
+
+### Read-Only
+
+- `backup_schedule` (String) The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule.
+- `edition` (String) Edition of the MSSQL server instance
+- `encryption` (Attributes) this defines which key to use for storage encryption (see [below for nested schema](#nestedatt--encryption))
+- `flavor_id` (String) The id of the instance flavor.
+- `is_deletable` (Boolean) Whether the instance can be deleted or not.
+- `name` (String) The name of the instance.
+- `network` (Attributes) The access configuration of the instance (see [below for nested schema](#nestedatt--network))
+- `replicas` (Number) How many replicas the instance should have.
+- `retention_days` (Number) The days for how long the backup files should be stored before cleaned up. 30 to 365
+- `status` (String)
+- `storage` (Attributes) The object containing information about the storage size and class. (see [below for nested schema](#nestedatt--storage))
+- `tf_original_api_id` (String) The ID of the instance.
+- `version` (String) The sqlserver version used for the instance.
+
+
+### Nested Schema for `encryption`
+
+Read-Only:
+
+- `kek_key_id` (String) The key identifier
+- `kek_key_ring_id` (String) The keyring identifier
+- `kek_key_version` (String) The key version
+- `service_account` (String)
+
+
+
+### Nested Schema for `network`
+
+Read-Only:
+
+- `access_scope` (String) The network access scope of the instance
+
+⚠️ **Note:** This feature is in private preview. Supplying this object is only permitted for enabled accounts. If your account does not have access, the request will be rejected.
+- `acl` (List of String) List of IPV4 cidr.
+- `instance_address` (String)
+- `router_address` (String)
+
+
+
+### Nested Schema for `storage`
+
+Read-Only:
+
+- `class` (String) The storage class for the storage.
+- `size` (Number) The storage size in Gigabytes.
diff --git a/docs/data-sources/sqlserverflexalpha_user.md b/docs/data-sources/sqlserverflexalpha_user.md
new file mode 100644
index 00000000..63526135
--- /dev/null
+++ b/docs/data-sources/sqlserverflexalpha_user.md
@@ -0,0 +1,62 @@
+---
+# generated by https://github.com/hashicorp/terraform-plugin-docs
+page_title: "stackitprivatepreview_sqlserverflexalpha_user Data Source - stackitprivatepreview"
+subcategory: ""
+description: |-
+
+---
+
+# stackitprivatepreview_sqlserverflexalpha_user (Data Source)
+
+
+
+## Example Usage
+
+```terraform
+data "stackitprivatepreview_sqlserverflexalpha_user" "example" {
+ project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
+ instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
+ user_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
+}
+```
+
+
+## Schema
+
+### Required
+
+- `instance_id` (String) The ID of the instance.
+- `project_id` (String) The STACKIT project ID.
+- `region` (String) The region which should be addressed
+
+### Optional
+
+- `page` (Number) Number of the page of items list to be returned.
+- `size` (Number) Number of items to be returned on each page.
+- `sort` (String) Sorting of the users to be returned on each page.
+
+### Read-Only
+
+- `pagination` (Attributes) (see [below for nested schema](#nestedatt--pagination))
+- `users` (Attributes List) List of all users inside an instance (see [below for nested schema](#nestedatt--users))
+
+
+### Nested Schema for `pagination`
+
+Read-Only:
+
+- `page` (Number)
+- `size` (Number)
+- `sort` (String)
+- `total_pages` (Number)
+- `total_rows` (Number)
+
+
+
+### Nested Schema for `users`
+
+Read-Only:
+
+- `status` (String) The current status of the user.
+- `tf_original_api_id` (Number) The ID of the user.
+- `username` (String) The name of the user.
diff --git a/docs/data-sources/sqlserverflexbeta_database.md b/docs/data-sources/sqlserverflexbeta_database.md
new file mode 100644
index 00000000..9322049f
--- /dev/null
+++ b/docs/data-sources/sqlserverflexbeta_database.md
@@ -0,0 +1,40 @@
+---
+# generated by https://github.com/hashicorp/terraform-plugin-docs
+page_title: "stackitprivatepreview_sqlserverflexbeta_database Data Source - stackitprivatepreview"
+subcategory: ""
+description: |-
+
+---
+
+# stackitprivatepreview_sqlserverflexbeta_database (Data Source)
+
+
+
+## Example Usage
+
+```terraform
+data "stackitprivatepreview_sqlserverflexbeta_database" "example" {
+ project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
+ instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
+ database_name = "dbname"
+}
+```
+
+
+## Schema
+
+### Required
+
+- `database_name` (String) The name of the database.
+- `instance_id` (String) The ID of the instance.
+- `project_id` (String) The STACKIT project ID.
+- `region` (String) The region which should be addressed
+
+### Read-Only
+
+- `collation_name` (String) The collation of the database. This database collation should match the *collation_name* of one of the collations given by the **Get database collation list** endpoint.
+- `compatibility_level` (Number) CompatibilityLevel of the Database.
+- `id` (String) The terraform internal identifier.
+- `name` (String) The name of the database.
+- `owner` (String) The owner of the database.
+- `tf_original_api_id` (Number) The id of the database.
diff --git a/docs/data-sources/sqlserverflexbeta_flavor.md b/docs/data-sources/sqlserverflexbeta_flavor.md
new file mode 100644
index 00000000..4d2a32f3
--- /dev/null
+++ b/docs/data-sources/sqlserverflexbeta_flavor.md
@@ -0,0 +1,54 @@
+---
+# generated by https://github.com/hashicorp/terraform-plugin-docs
+page_title: "stackitprivatepreview_sqlserverflexbeta_flavor Data Source - stackitprivatepreview"
+subcategory: ""
+description: |-
+
+---
+
+# stackitprivatepreview_sqlserverflexbeta_flavor (Data Source)
+
+
+
+## Example Usage
+
+```terraform
+data "stackitprivatepreview_sqlserverflexbeta_flavor" "flavor" {
+ project_id = var.project_id
+ region = var.region
+ cpu = 4
+ ram = 16
+ node_type = "Single"
+ storage_class = "premium-perf2-stackit"
+}
+```
+
+
+## Schema
+
+### Required
+
+- `cpu` (Number) The cpu count of the instance.
+- `node_type` (String) defines the nodeType it can be either single or HA
+- `project_id` (String) The project ID of the flavor.
+- `ram` (Number) The memory of the instance in Gibibyte.
+- `region` (String) The region of the flavor.
+- `storage_class` (String) The storage class of the instance.
+
+### Read-Only
+
+- `description` (String) The flavor description.
+- `flavor_id` (String) The id of the instance flavor.
+- `id` (String) The id of the instance flavor.
+- `max_gb` (Number) maximum storage which can be ordered for the flavor in Gigabyte.
+- `min_gb` (Number) minimum storage which is required to order in Gigabyte.
+- `storage_classes` (Attributes List) List of storage classes available for the flavor. (see [below for nested schema](#nestedatt--storage_classes))
+
+
+### Nested Schema for `storage_classes`
+
+Read-Only:
+
+- `class` (String)
+- `max_io_per_sec` (Number)
+- `max_through_in_mb` (Number)
diff --git a/docs/data-sources/sqlserverflexbeta_instance.md b/docs/data-sources/sqlserverflexbeta_instance.md
new file mode 100644
index 00000000..431f95f1
--- /dev/null
+++ b/docs/data-sources/sqlserverflexbeta_instance.md
@@ -0,0 +1,77 @@
+---
+# generated by https://github.com/hashicorp/terraform-plugin-docs
+page_title: "stackitprivatepreview_sqlserverflexbeta_instance Data Source - stackitprivatepreview"
+subcategory: ""
+description: |-
+
+---
+
+# stackitprivatepreview_sqlserverflexbeta_instance (Data Source)
+
+
+
+## Example Usage
+
+```terraform
+data "stackitprivatepreview_sqlserverflexbeta_instance" "example" {
+ project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
+ instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
+}
+```
+
+
+## Schema
+
+### Required
+
+- `instance_id` (String) The ID of the instance.
+- `project_id` (String) The STACKIT project ID.
+- `region` (String) The region which should be addressed
+
+### Read-Only
+
+- `backup_schedule` (String) The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule.
+- `edition` (String) Edition of the MSSQL server instance
+- `encryption` (Attributes) this defines which key to use for storage encryption (see [below for nested schema](#nestedatt--encryption))
+- `flavor_id` (String) The id of the instance flavor.
+- `is_deletable` (Boolean) Whether the instance can be deleted or not.
+- `name` (String) The name of the instance.
+- `network` (Attributes) The access configuration of the instance (see [below for nested schema](#nestedatt--network))
+- `replicas` (Number) How many replicas the instance should have.
+- `retention_days` (Number) The days for how long the backup files should be stored before cleaned up. 30 to 365
+- `status` (String)
+- `storage` (Attributes) The object containing information about the storage size and class. (see [below for nested schema](#nestedatt--storage))
+- `tf_original_api_id` (String) The ID of the instance.
+- `version` (String) The sqlserver version used for the instance.
+
+
+### Nested Schema for `encryption`
+
+Read-Only:
+
+- `kek_key_id` (String) The key identifier
+- `kek_key_ring_id` (String) The keyring identifier
+- `kek_key_version` (String) The key version
+- `service_account` (String)
+
+
+
+### Nested Schema for `network`
+
+Read-Only:
+
+- `access_scope` (String) The network access scope of the instance
+
+⚠️ **Note:** This feature is in private preview. Supplying this object is only permitted for enabled accounts. If your account does not have access, the request will be rejected.
+- `acl` (List of String) List of IPV4 cidr.
+- `instance_address` (String)
+- `router_address` (String)
+
+
+
+### Nested Schema for `storage`
+
+Read-Only:
+
+- `class` (String) The storage class for the storage.
+- `size` (Number) The storage size in Gigabytes.
diff --git a/docs/data-sources/sqlserverflexbeta_user.md b/docs/data-sources/sqlserverflexbeta_user.md
new file mode 100644
index 00000000..f87f454e
--- /dev/null
+++ b/docs/data-sources/sqlserverflexbeta_user.md
@@ -0,0 +1,54 @@
+---
+# generated by https://github.com/hashicorp/terraform-plugin-docs
+page_title: "stackitprivatepreview_sqlserverflexbeta_user Data Source - stackitprivatepreview"
+subcategory: ""
+description: |-
+
+---
+
+# stackitprivatepreview_sqlserverflexbeta_user (Data Source)
+
+
+
+
+
+
+## Schema
+
+### Required
+
+- `instance_id` (String) The ID of the instance.
+- `project_id` (String) The STACKIT project ID.
+- `region` (String) The region which should be addressed
+
+### Optional
+
+- `page` (Number) Number of the page of items list to be returned.
+- `size` (Number) Number of items to be returned on each page.
+- `sort` (String) Sorting of the users to be returned on each page.
+
+### Read-Only
+
+- `pagination` (Attributes) (see [below for nested schema](#nestedatt--pagination))
+- `users` (Attributes List) List of all users inside an instance (see [below for nested schema](#nestedatt--users))
+
+
+### Nested Schema for `pagination`
+
+Read-Only:
+
+- `page` (Number)
+- `size` (Number)
+- `sort` (String)
+- `total_pages` (Number)
+- `total_rows` (Number)
+
+
+
+### Nested Schema for `users`
+
+Read-Only:
+
+- `status` (String) The current status of the user.
+- `tf_original_api_id` (Number) The ID of the user.
+- `username` (String) The name of the user.
diff --git a/docs/index.md b/docs/index.md
new file mode 100644
index 00000000..84bc25b3
--- /dev/null
+++ b/docs/index.md
@@ -0,0 +1,83 @@
+---
+# generated by https://github.com/hashicorp/terraform-plugin-docs
+page_title: "stackitprivatepreview Provider"
+description: |-
+
+---
+
+# stackitprivatepreview Provider
+
+
+
+## Example Usage
+
+```terraform
+provider "stackitprivatepreview" {
+ default_region = "eu01"
+}
+
+provider "stackitprivatepreview" {
+ default_region = "eu01"
+ service_account_key_path = "service_account.json"
+}
+
+# Authentication
+
+# Key flow
+provider "stackitprivatepreview" {
+ default_region = "eu01"
+ service_account_key = var.service_account_key
+ private_key = var.private_key
+}
+
+# Key flow (using path)
+provider "stackitprivatepreview" {
+ default_region = "eu01"
+ service_account_key_path = var.service_account_key_path
+ private_key_path = var.private_key_path
+}
+```
+
+
+## Schema
+
+### Optional
+
+- `authorization_custom_endpoint` (String) Custom endpoint for the Membership service
+- `cdn_custom_endpoint` (String) Custom endpoint for the CDN service
+- `credentials_path` (String) Path of JSON from where the credentials are read. Takes precedence over the env var `STACKIT_CREDENTIALS_PATH`. Default value is `~/.stackit/credentials.json`.
+- `default_region` (String) Region will be used as the default location for regional services. Not all services require a region, some are global
+- `dns_custom_endpoint` (String) Custom endpoint for the DNS service
+- `enable_beta_resources` (Boolean) Enable beta resources. Default is false.
+- `experiments` (List of String) Enables experiments. These are unstable features without official support. More information can be found in the README. Available Experiments: iam, routing-tables, network
+- `git_custom_endpoint` (String) Custom endpoint for the Git service
+- `iaas_custom_endpoint` (String) Custom endpoint for the IaaS service
+- `kms_custom_endpoint` (String) Custom endpoint for the KMS service
+- `loadbalancer_custom_endpoint` (String) Custom endpoint for the Load Balancer service
+- `logme_custom_endpoint` (String) Custom endpoint for the LogMe service
+- `mariadb_custom_endpoint` (String) Custom endpoint for the MariaDB service
+- `modelserving_custom_endpoint` (String) Custom endpoint for the AI Model Serving service
+- `mongodbflex_custom_endpoint` (String) Custom endpoint for the MongoDB Flex service
+- `objectstorage_custom_endpoint` (String) Custom endpoint for the Object Storage service
+- `observability_custom_endpoint` (String) Custom endpoint for the Observability service
+- `opensearch_custom_endpoint` (String) Custom endpoint for the OpenSearch service
+- `postgresflex_custom_endpoint` (String) Custom endpoint for the PostgresFlex service
+- `private_key` (String) Private RSA key used for authentication, relevant for the key flow. It takes precedence over the private key that is included in the service account key.
+- `private_key_path` (String) Path for the private RSA key used for authentication, relevant for the key flow. It takes precedence over the private key that is included in the service account key.
+- `rabbitmq_custom_endpoint` (String) Custom endpoint for the RabbitMQ service
+- `redis_custom_endpoint` (String) Custom endpoint for the Redis service
+- `region` (String, Deprecated) Region will be used as the default location for regional services. Not all services require a region, some are global
+- `resourcemanager_custom_endpoint` (String) Custom endpoint for the Resource Manager service
+- `scf_custom_endpoint` (String) Custom endpoint for the Cloud Foundry (SCF) service
+- `secretsmanager_custom_endpoint` (String) Custom endpoint for the Secrets Manager service
+- `server_backup_custom_endpoint` (String) Custom endpoint for the Server Backup service
+- `server_update_custom_endpoint` (String) Custom endpoint for the Server Update service
+- `service_account_custom_endpoint` (String) Custom endpoint for the Service Account service
+- `service_account_email` (String, Deprecated) Service account email. It can also be set using the environment variable STACKIT_SERVICE_ACCOUNT_EMAIL. It is required if you want to use the resource manager project resource.
+- `service_account_key` (String) Service account key used for authentication. If set, the key flow will be used to authenticate all operations.
+- `service_account_key_path` (String) Path for the service account key used for authentication. If set, the key flow will be used to authenticate all operations.
+- `service_account_token` (String, Deprecated) Token used for authentication. If set, the token flow will be used to authenticate all operations.
+- `service_enablement_custom_endpoint` (String) Custom endpoint for the Service Enablement API
+- `ske_custom_endpoint` (String) Custom endpoint for the Kubernetes Engine (SKE) service
+- `sqlserverflex_custom_endpoint` (String) Custom endpoint for the SQL Server Flex service
+- `token_custom_endpoint` (String) Custom endpoint for the token API, which is used to request access tokens when using the key flow
diff --git a/docs/resources/postgresflexalpha_database.md b/docs/resources/postgresflexalpha_database.md
new file mode 100644
index 00000000..6c94fd62
--- /dev/null
+++ b/docs/resources/postgresflexalpha_database.md
@@ -0,0 +1,57 @@
+---
+# generated by https://github.com/hashicorp/terraform-plugin-docs
+page_title: "stackitprivatepreview_postgresflexalpha_database Resource - stackitprivatepreview"
+subcategory: ""
+description: |-
+
+---
+
+# stackitprivatepreview_postgresflexalpha_database (Resource)
+
+
+
+## Example Usage
+
+```terraform
+resource "stackitprivatepreview_postgresflexalpha_database" "example" {
+ project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
+ instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
+ name = "mydb"
+ owner = "myusername"
+}
+
+# Only use the import statement, if you want to import an existing postgresflex database
+import {
+ to = stackitprivatepreview_postgresflexalpha_database.import-example
+ id = "${var.project_id},${var.region},${var.postgres_instance_id},${var.postgres_database_id}"
+}
+
+import {
+ to = stackitprivatepreview_postgresflexalpha_database.import-example
+ identity = {
+ project_id = "project_id"
+ region = "region"
+ instance_id = "instance_id"
+ database_id = "database_id"
+ }
+}
+```
+
+
+## Schema
+
+### Required
+
+- `name` (String) The name of the database.
+
+### Optional
+
+- `database_id` (Number) The ID of the database.
+- `instance_id` (String) The ID of the instance.
+- `owner` (String) The owner of the database.
+- `project_id` (String) The STACKIT project ID.
+- `region` (String) The region which should be addressed
+
+### Read-Only
+
+- `id` (Number) The id of the database.
diff --git a/docs/resources/postgresflexalpha_instance.md b/docs/resources/postgresflexalpha_instance.md
new file mode 100644
index 00000000..f6f10bcc
--- /dev/null
+++ b/docs/resources/postgresflexalpha_instance.md
@@ -0,0 +1,138 @@
+---
+# generated by https://github.com/hashicorp/terraform-plugin-docs
+page_title: "stackitprivatepreview_postgresflexalpha_instance Resource - stackitprivatepreview"
+subcategory: ""
+description: |-
+
+---
+
+# stackitprivatepreview_postgresflexalpha_instance (Resource)
+
+
+
+## Example Usage
+
+```terraform
+resource "stackitprivatepreview_postgresflexalpha_instance" "example-instance" {
+ project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
+ name = "example-instance"
+ acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"]
+ backup_schedule = "0 0 * * *"
+ retention_days = 30
+ flavor_id = "flavor.id"
+ replicas = 1
+ storage = {
+ performance_class = "premium-perf2-stackit"
+ size = 10
+ }
+ encryption = {
+ kek_key_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
+ kek_key_ring_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
+ kek_key_version = 1
+ service_account = "service@account.email"
+ }
+ network = {
+ acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"]
+ access_scope = "PUBLIC"
+ }
+ version = 17
+}
+
+# Only use the import statement, if you want to import an existing postgresflex instance
+import {
+ to = stackitprivatepreview_postgresflexalpha_instance.import-example
+ id = "${var.project_id},${var.region},${var.postgres_instance_id}"
+}
+
+import {
+ to = stackitprivatepreview_postgresflexalpha_instance.import-example
+ identity = {
+ project_id = var.project_id
+ region = var.region
+ instance_id = var.postgres_instance_id
+ }
+}
+```
+
+
+## Schema
+
+### Required
+
+- `backup_schedule` (String) The schedule for when the database backup will be created. Currently, ONLY daily schedules are supported (every 24 hours). The schedule is written as a cron schedule.
+- `flavor_id` (String) The id of the instance flavor.
+- `name` (String) The name of the instance.
+- `network` (Attributes) The access configuration of the instance (see [below for nested schema](#nestedatt--network))
+- `replicas` (Number) How many replicas the instance should have.
+- `retention_days` (Number) How long backups are retained. The value can only be between 32 and 365 days.
+- `storage` (Attributes) The object containing information about the storage size and class. (see [below for nested schema](#nestedatt--storage))
+- `version` (String) The Postgres version used for the instance. See [Versions Endpoint](/documentation/postgres-flex-service/version/v3alpha1#tag/Version) for supported version parameters.
+
+### Optional
+
+- `encryption` (Attributes) The configuration for instance's volume and backup storage encryption.
+
+⚠ **Note:** This feature is in private preview. Supplying this object is only permitted for enabled accounts. If your account does not have access, the request will be rejected. (see [below for nested schema](#nestedatt--encryption))
+- `instance_id` (String) The ID of the instance.
+- `project_id` (String) The STACKIT project ID.
+- `region` (String) The region which should be addressed
+
+### Read-Only
+
+- `acl` (List of String) List of IPV4 cidr.
+- `connection_info` (Attributes) The connection information of the instance (see [below for nested schema](#nestedatt--connection_info))
+- `id` (String) The ID of the instance.
+- `is_deletable` (Boolean) Whether the instance can be deleted or not.
+- `status` (String) The current status of the instance.
+
+
+### Nested Schema for `network`
+
+Required:
+
+- `acl` (List of String) List of IPV4 cidr.
+
+Optional:
+
+- `access_scope` (String) The access scope of the instance. It defines if the instance is public or airgapped.
+
+Read-Only:
+
+- `instance_address` (String)
+- `router_address` (String)
+
+
+
+### Nested Schema for `storage`
+
+Required:
+
+- `performance_class` (String) The storage class for the storage.
+- `size` (Number) The storage size in Gigabytes.
+
+
+
+### Nested Schema for `encryption`
+
+Required:
+
+- `kek_key_id` (String) The encryption-key key identifier
+- `kek_key_ring_id` (String) The encryption-key keyring identifier
+- `kek_key_version` (String) The encryption-key version
+- `service_account` (String)
+
+
+
+### Nested Schema for `connection_info`
+
+Read-Only:
+
+- `write` (Attributes) The DNS name and port in the instance overview (see [below for nested schema](#nestedatt--connection_info--write))
+
+
+### Nested Schema for `connection_info.write`
+
+Read-Only:
+
+- `host` (String) The host of the instance.
+- `port` (Number) The port of the instance.
diff --git a/docs/resources/postgresflexalpha_user.md b/docs/resources/postgresflexalpha_user.md
new file mode 100644
index 00000000..b83de15d
--- /dev/null
+++ b/docs/resources/postgresflexalpha_user.md
@@ -0,0 +1,59 @@
+---
+# generated by https://github.com/hashicorp/terraform-plugin-docs
+page_title: "stackitprivatepreview_postgresflexalpha_user Resource - stackitprivatepreview"
+subcategory: ""
+description: |-
+
+---
+
+# stackitprivatepreview_postgresflexalpha_user (Resource)
+
+
+
+## Example Usage
+
+```terraform
+resource "stackitprivatepreview_postgresflexalpha_user" "example" {
+ project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
+ instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
+ name = "username"
+ roles = ["role"]
+}
+
+# Only use the import statement, if you want to import an existing postgresflex user
+import {
+ to = stackitprivatepreview_postgresflexalpha_user.import-example
+ id = "${var.project_id},${var.region},${var.postgres_instance_id},${var.user_id}"
+}
+
+import {
+ to = stackitprivatepreview_postgresflexalpha_user.import-example
+ identity = {
+ project_id = "project.id"
+ region = "region"
+ instance_id = "instance.id"
+ user_id = "user.id"
+ }
+}
+```
+
+
+## Schema
+
+### Required
+
+- `name` (String) The name of the user.
+
+### Optional
+
+- `instance_id` (String) The ID of the instance.
+- `project_id` (String) The STACKIT project ID.
+- `region` (String) The region which should be addressed
+- `roles` (List of String) A list containing the user roles for the instance.
+- `user_id` (Number) The ID of the user.
+
+### Read-Only
+
+- `id` (Number) The ID of the user.
+- `password` (String) The password for the user.
+- `status` (String) The current status of the user.
diff --git a/docs/resources/sqlserverflexalpha_database.md b/docs/resources/sqlserverflexalpha_database.md
new file mode 100644
index 00000000..7d8f050b
--- /dev/null
+++ b/docs/resources/sqlserverflexalpha_database.md
@@ -0,0 +1,63 @@
+---
+# generated by https://github.com/hashicorp/terraform-plugin-docs
+page_title: "stackitprivatepreview_sqlserverflexalpha_database Resource - stackitprivatepreview"
+subcategory: ""
+description: |-
+
+---
+
+# stackitprivatepreview_sqlserverflexalpha_database (Resource)
+
+
+
+## Example Usage
+
+```terraform
+resource "stackitprivatepreview_sqlserverflexalpha_database" "example" {
+ project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
+ instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
+ collation = ""
+ compatibility = "160"
+ name = ""
+ owner = ""
+}
+
+# Only use the import statement, if you want to import an existing sqlserverflex database
+import {
+ to = stackitprivatepreview_sqlserverflexalpha_database.import-example
+ id = "${var.project_id},${var.region},${var.sql_instance_id},${var.sql_database_id}"
+}
+
+import {
+ to = stackitprivatepreview_sqlserverflexalpha_database.import-example
+ identity = {
+ project_id = "project.id"
+ region = "region"
+ instance_id = "instance.id"
+ database_id = "database.id"
+ }
+}
+```
+
+
+## Schema
+
+### Required
+
+- `name` (String) The name of the database.
+- `owner` (String) The owner of the database.
+
+### Optional
+
+- `collation` (String) The collation of the database. This database collation should match the *collation_name* of one of the collations given by the **Get database collation list** endpoint.
+- `compatibility` (Number) CompatibilityLevel of the Database.
+- `database_name` (String) The name of the database.
+- `instance_id` (String) The ID of the instance.
+- `project_id` (String) The STACKIT project ID.
+- `region` (String) The region which should be addressed
+
+### Read-Only
+
+- `collation_name` (String) The collation of the database. This database collation should match the *collation_name* of one of the collations given by the **Get database collation list** endpoint.
+- `compatibility_level` (Number) CompatibilityLevel of the Database.
+- `id` (Number) The id of the database.
diff --git a/docs/resources/sqlserverflexalpha_instance.md b/docs/resources/sqlserverflexalpha_instance.md
new file mode 100644
index 00000000..95e33673
--- /dev/null
+++ b/docs/resources/sqlserverflexalpha_instance.md
@@ -0,0 +1,103 @@
+---
+# generated by https://github.com/hashicorp/terraform-plugin-docs
+page_title: "stackitprivatepreview_sqlserverflexalpha_instance Resource - stackitprivatepreview"
+subcategory: ""
+description: |-
+
+---
+
+# stackitprivatepreview_sqlserverflexalpha_instance (Resource)
+
+
+
+## Example Usage
+
+```terraform
+resource "stackitprivatepreview_sqlserverflexalpha_instance" "example" {
+ project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
+ name = "example-instance"
+ acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"]
+ backup_schedule = "00 00 * * *"
+ flavor = {
+ cpu = 4
+ ram = 16
+ }
+ storage = {
+ class = "class"
+ size = 5
+ }
+ version = 2022
+}
+
+# Only use the import statement, if you want to import an existing sqlserverflex instance
+import {
+ to = stackitprivatepreview_sqlserverflexalpha_instance.import-example
+ id = "${var.project_id},${var.region},${var.sql_instance_id}"
+}
+```
+
+
+## Schema
+
+### Required
+
+- `backup_schedule` (String) The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule.
+- `flavor_id` (String) The id of the instance flavor.
+- `name` (String) The name of the instance.
+- `network` (Attributes) the network configuration of the instance. (see [below for nested schema](#nestedatt--network))
+- `retention_days` (Number) The days for how long the backup files should be stored before cleaned up. 30 to 365
+- `storage` (Attributes) The object containing information about the storage size and class. (see [below for nested schema](#nestedatt--storage))
+- `version` (String) The sqlserver version used for the instance.
+
+### Optional
+
+- `encryption` (Attributes) this defines which key to use for storage encryption (see [below for nested schema](#nestedatt--encryption))
+- `instance_id` (String) The ID of the instance.
+- `project_id` (String) The STACKIT project ID.
+- `region` (String) The region which should be addressed
+
+### Read-Only
+
+- `edition` (String) Edition of the MSSQL server instance
+- `id` (String) The ID of the instance.
+- `is_deletable` (Boolean) Whether the instance can be deleted or not.
+- `replicas` (Number) How many replicas the instance should have.
+- `status` (String)
+
+
+### Nested Schema for `network`
+
+Required:
+
+- `acl` (List of String) List of IPV4 cidr.
+
+Optional:
+
+- `access_scope` (String) The network access scope of the instance
+
+⚠️ **Note:** This feature is in private preview. Supplying this object is only permitted for enabled accounts. If your account does not have access, the request will be rejected.
+
+Read-Only:
+
+- `instance_address` (String)
+- `router_address` (String)
+
+
+
+### Nested Schema for `storage`
+
+Required:
+
+- `class` (String) The storage class for the storage.
+- `size` (Number) The storage size in Gigabytes.
+
+
+
+### Nested Schema for `encryption`
+
+Required:
+
+- `kek_key_id` (String) The key identifier
+- `kek_key_ring_id` (String) The keyring identifier
+- `kek_key_version` (String) The key version
+- `service_account` (String)
diff --git a/docs/resources/sqlserverflexalpha_user.md b/docs/resources/sqlserverflexalpha_user.md
new file mode 100644
index 00000000..85d5350e
--- /dev/null
+++ b/docs/resources/sqlserverflexalpha_user.md
@@ -0,0 +1,53 @@
+---
+# generated by https://github.com/hashicorp/terraform-plugin-docs
+page_title: "stackitprivatepreview_sqlserverflexalpha_user Resource - stackitprivatepreview"
+subcategory: ""
+description: |-
+
+---
+
+# stackitprivatepreview_sqlserverflexalpha_user (Resource)
+
+
+
+## Example Usage
+
+```terraform
+resource "stackitprivatepreview_sqlserverflexalpha_user" "example" {
+ project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
+ instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
+ username = "username"
+ roles = ["role"]
+}
+
+# Only use the import statement, if you want to import an existing sqlserverflex user
+import {
+ to = stackitprivatepreview_sqlserverflexalpha_user.import-example
+ id = "${var.project_id},${var.region},${var.sql_instance_id},${var.sql_user_id}"
+}
+```
+
+
+## Schema
+
+### Required
+
+- `roles` (List of String) A list containing the user roles for the instance. A list with the valid user roles can be retrieved using the List Roles endpoint.
+- `username` (String) The name of the user.
+
+### Optional
+
+- `default_database` (String) The default database for a user of the instance.
+- `instance_id` (String) The ID of the instance.
+- `project_id` (String) The STACKIT project ID.
+- `region` (String) The region which should be addressed
+- `user_id` (Number) The ID of the user.
+
+### Read-Only
+
+- `host` (String) The host of the instance in which the user belongs to.
+- `id` (Number) The ID of the user.
+- `password` (String) The password for the user.
+- `port` (Number) The port of the instance in which the user belongs to.
+- `status` (String) The current status of the user.
+- `uri` (String) The connection string for the user to the instance.
diff --git a/docs/resources/sqlserverflexbeta_database.md b/docs/resources/sqlserverflexbeta_database.md
new file mode 100644
index 00000000..fabaaccb
--- /dev/null
+++ b/docs/resources/sqlserverflexbeta_database.md
@@ -0,0 +1,51 @@
+---
+# generated by https://github.com/hashicorp/terraform-plugin-docs
+page_title: "stackitprivatepreview_sqlserverflexbeta_database Resource - stackitprivatepreview"
+subcategory: ""
+description: |-
+
+---
+
+# stackitprivatepreview_sqlserverflexbeta_database (Resource)
+
+
+
+## Example Usage
+
+```terraform
+resource "stackitprivatepreview_sqlserverflexbeta_database" "example" {
+ project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
+ instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
+ name = "mydb"
+ owner = "myusername"
+}
+
+# Only use the import statement, if you want to import an existing sqlserverflex database
+import {
+ to = stackitprivatepreview_sqlserverflexbeta_database.import-example
+ id = "${var.project_id},${var.region},${var.sql_instance_id},${var.sql_database_id}"
+}
+```
+
+
+## Schema
+
+### Required
+
+- `name` (String) The name of the database.
+- `owner` (String) The owner of the database.
+
+### Optional
+
+- `collation` (String) The collation of the database. This database collation should match the *collation_name* of one of the collations given by the **Get database collation list** endpoint.
+- `compatibility` (Number) CompatibilityLevel of the Database.
+- `database_name` (String) The name of the database.
+- `instance_id` (String) The ID of the instance.
+- `project_id` (String) The STACKIT project ID.
+- `region` (String) The region which should be addressed
+
+### Read-Only
+
+- `collation_name` (String) The collation of the database. This database collation should match the *collation_name* of one of the collations given by the **Get database collation list** endpoint.
+- `compatibility_level` (Number) CompatibilityLevel of the Database.
+- `id` (Number) The id of the database.
diff --git a/docs/resources/sqlserverflexbeta_instance.md b/docs/resources/sqlserverflexbeta_instance.md
new file mode 100644
index 00000000..20f5a9bc
--- /dev/null
+++ b/docs/resources/sqlserverflexbeta_instance.md
@@ -0,0 +1,158 @@
+---
+# generated by https://github.com/hashicorp/terraform-plugin-docs
+page_title: "stackitprivatepreview_sqlserverflexbeta_instance Resource - stackitprivatepreview"
+subcategory: ""
+description: |-
+
+---
+
+# stackitprivatepreview_sqlserverflexbeta_instance (Resource)
+
+
+
+## Example Usage
+
+```terraform
+# without encryption and SNA
+resource "stackitprivatepreview_sqlserverflexbeta_instance" "instance" {
+ project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
+ name = "example-instance"
+ backup_schedule = "0 3 * * *"
+ retention_days = 31
+ flavor_id = "flavor_id"
+ storage = {
+ class = "premium-perf2-stackit"
+ size = 50
+ }
+ version = 2022
+ network = {
+ acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"]
+ access_scope = "SNA"
+ }
+}
+
+# without encryption and PUBLIC
+resource "stackitprivatepreview_sqlserverflexbeta_instance" "instance" {
+ project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
+ name = "example-instance"
+ backup_schedule = "0 3 * * *"
+ retention_days = 31
+ flavor_id = "flavor_id"
+ storage = {
+ class = "premium-perf2-stackit"
+ size = 50
+ }
+ version = 2022
+ network = {
+ acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"]
+ access_scope = "PUBLIC"
+ }
+}
+
+# with encryption and SNA
+resource "stackitprivatepreview_sqlserverflexbeta_instance" "instance" {
+ project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
+ name = "example-instance"
+ backup_schedule = "0 3 * * *"
+ retention_days = 31
+ flavor_id = "flavor_id"
+ storage = {
+ class = "premium-perf2-stackit"
+ size = 50
+ }
+ version = 2022
+ encryption = {
+ kek_key_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
+ kek_key_ring_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
+ kek_key_version = 1
+ service_account = "service_account@email"
+ }
+ network = {
+ acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"]
+ access_scope = "SNA"
+ }
+}
+
+
+# Only use the import statement, if you want to import an existing sqlserverflex instance
+import {
+ to = stackitprivatepreview_sqlserverflexbeta_instance.import-example
+ id = "${var.project_id},${var.region},${var.sql_instance_id}"
+}
+
+# import with identity
+import {
+ to = stackitprivatepreview_sqlserverflexbeta_instance.import-example
+ identity = {
+ project_id = var.project_id
+ region = var.region
+ instance_id = var.sql_instance_id
+ }
+}
+```
+
+
+## Schema
+
+### Required
+
+- `backup_schedule` (String) The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule.
+- `flavor_id` (String) The id of the instance flavor.
+- `name` (String) The name of the instance.
+- `network` (Attributes) the network configuration of the instance. (see [below for nested schema](#nestedatt--network))
+- `retention_days` (Number) The days for how long the backup files should be stored before cleaned up. 30 to 365
+- `storage` (Attributes) The object containing information about the storage size and class. (see [below for nested schema](#nestedatt--storage))
+- `version` (String) The sqlserver version used for the instance.
+
+### Optional
+
+- `encryption` (Attributes) this defines which key to use for storage encryption (see [below for nested schema](#nestedatt--encryption))
+- `instance_id` (String) The ID of the instance.
+- `project_id` (String) The STACKIT project ID.
+- `region` (String) The region which should be addressed
+
+### Read-Only
+
+- `edition` (String) Edition of the MSSQL server instance
+- `id` (String) The ID of the instance.
+- `is_deletable` (Boolean) Whether the instance can be deleted or not.
+- `replicas` (Number) How many replicas the instance should have.
+- `status` (String)
+
+
+### Nested Schema for `network`
+
+Required:
+
+- `acl` (List of String) List of IPV4 cidr.
+
+Optional:
+
+- `access_scope` (String) The network access scope of the instance
+
+⚠️ **Note:** This feature is in private preview. Supplying this object is only permitted for enabled accounts. If your account does not have access, the request will be rejected.
+
+Read-Only:
+
+- `instance_address` (String)
+- `router_address` (String)
+
+
+
+### Nested Schema for `storage`
+
+Required:
+
+- `class` (String) The storage class for the storage.
+- `size` (Number) The storage size in Gigabytes.
+
+
+
+### Nested Schema for `encryption`
+
+Required:
+
+- `kek_key_id` (String) The key identifier
+- `kek_key_ring_id` (String) The keyring identifier
+- `kek_key_version` (String) The key version
+- `service_account` (String)
diff --git a/docs/resources/sqlserverflexbeta_user.md b/docs/resources/sqlserverflexbeta_user.md
new file mode 100644
index 00000000..81d6da28
--- /dev/null
+++ b/docs/resources/sqlserverflexbeta_user.md
@@ -0,0 +1,53 @@
+---
+# generated by https://github.com/hashicorp/terraform-plugin-docs
+page_title: "stackitprivatepreview_sqlserverflexbeta_user Resource - stackitprivatepreview"
+subcategory: ""
+description: |-
+
+---
+
+# stackitprivatepreview_sqlserverflexbeta_user (Resource)
+
+
+
+## Example Usage
+
+```terraform
+resource "stackitprivatepreview_sqlserverflexbeta_user" "example" {
+ project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
+ instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
+ username = "username"
+ roles = ["role"]
+}
+
+# Only use the import statement, if you want to import an existing sqlserverflex user
+import {
+ to = stackitprivatepreview_sqlserverflexbeta_user.import-example
+ id = "${var.project_id},${var.region},${var.sql_instance_id},${var.sql_user_id}"
+}
+```
+
+
+## Schema
+
+### Required
+
+- `roles` (List of String) A list containing the user roles for the instance. A list with the valid user roles can be retrieved using the List Roles endpoint.
+- `username` (String) The name of the user.
+
+### Optional
+
+- `default_database` (String) The default database for a user of the instance.
+- `instance_id` (String) The ID of the instance.
+- `project_id` (String) The STACKIT project ID.
+- `region` (String) The region which should be addressed
+- `user_id` (Number) The ID of the user.
+
+### Read-Only
+
+- `host` (String) The host of the instance in which the user belongs to.
+- `id` (Number) The ID of the user.
+- `password` (String) The password for the user.
+- `port` (Number) The port of the instance in which the user belongs to.
+- `status` (String) The current status of the user.
+- `uri` (String) The connection string for the user to the instance.
diff --git a/generator/cmd/build/build.go b/generator/cmd/build/build.go
index 0b7ec942..5d560b32 100644
--- a/generator/cmd/build/build.go
+++ b/generator/cmd/build/build.go
@@ -1,34 +1,17 @@
package build
import (
- "bufio"
- "bytes"
"errors"
"fmt"
"go/ast"
"go/parser"
"go/token"
- "io"
- "log"
"log/slog"
"os"
"os/exec"
"path"
- "path/filepath"
"regexp"
- "strconv"
"strings"
- "text/template"
-
- "github.com/ldez/go-git-cmd-wrapper/v2/clone"
- "github.com/ldez/go-git-cmd-wrapper/v2/git"
-)
-
-const (
- OAS_REPO_NAME = "stackit-api-specifications"
- OAS_REPO = "https://github.com/stackitcloud/stackit-api-specifications.git"
- GEN_REPO_NAME = "stackit-sdk-generator"
- GEN_REPO = "https://github.com/stackitcloud/stackit-sdk-generator.git"
)
type version struct {
@@ -38,6 +21,7 @@ type version struct {
}
type Builder struct {
+ rootDir string
SkipClone bool
SkipCleanup bool
PackagesOnly bool
@@ -51,15 +35,9 @@ func (b *Builder) Build() error {
slog.Info(" >>> only generating pkg_gen <<<")
}
- root, err := getRoot()
- if err != nil {
- log.Fatal(err)
- }
- if root == nil || *root == "" {
- return fmt.Errorf("unable to determine root directory from git")
- }
- if b.Verbose {
- slog.Info(" ... using root directory", "dir", *root)
+ rootErr := b.determineRoot()
+ if rootErr != nil {
+ return rootErr
}
if !b.PackagesOnly {
@@ -72,214 +50,57 @@ func (b *Builder) Build() error {
}
}
- if !b.SkipCleanup {
- slog.Info("Cleaning up old packages directory")
- err = os.RemoveAll(path.Join(*root, "pkg_gen"))
- if err != nil {
- return err
- }
+ //if !b.SkipCleanup {
+ // slog.Info("Cleaning up old packages directory")
+ // err := os.RemoveAll(path.Join(b.rootDir, "pkg_gen"))
+ // if err != nil {
+ // return err
+ // }
+ //}
+ //
+ //if !b.SkipCleanup && !b.PackagesOnly {
+ // slog.Info("Cleaning up old packages directory")
+ // err := os.RemoveAll(path.Join(b.rootDir, "pkg_gen"))
+ // if err != nil {
+ // return err
+ // }
+ //}
+
+ //slog.Info("Creating generator dir", "dir", fmt.Sprintf("%s/%s", *root, GEN_REPO_NAME))
+ //genDir := path.Join(*root, GEN_REPO_NAME)
+ //if !b.SkipClone {
+ // err = createGeneratorDir(GEN_REPO, genDir, b.SkipClone)
+ // if err != nil {
+ // return err
+ // }
+ //}
+
+ oasHandlerErr := b.oasHandler(path.Join(b.rootDir, "service_specs"))
+ if oasHandlerErr != nil {
+ return oasHandlerErr
}
- if !b.SkipCleanup && !b.PackagesOnly {
- slog.Info("Cleaning up old packages directory")
- err = os.RemoveAll(path.Join(*root, "pkg_gen"))
- if err != nil {
- return err
- }
- }
-
- slog.Info("Creating generator dir", "dir", fmt.Sprintf("%s/%s", *root, GEN_REPO_NAME))
- genDir := path.Join(*root, GEN_REPO_NAME)
- if !b.SkipClone {
- err = createGeneratorDir(GEN_REPO, genDir, b.SkipClone)
- if err != nil {
- return err
- }
- }
-
- slog.Info("Creating oas repo dir", "dir", fmt.Sprintf("%s/%s", *root, OAS_REPO_NAME))
- repoDir, err := b.createRepoDir(genDir, OAS_REPO, OAS_REPO_NAME, b.SkipClone)
- if err != nil {
- return fmt.Errorf("%s", err.Error())
- }
-
- // TODO - major
- verMap, err := b.getVersions(repoDir)
- if err != nil {
- return fmt.Errorf("%s", err.Error())
- }
-
- slog.Info("Reducing to only latest or highest")
- res, err := getOnlyLatest(verMap)
- if err != nil {
- return fmt.Errorf("%s", err.Error())
- }
-
- slog.Info(">> Creating OAS dir", "path", path.Join(genDir, "oas", "services"))
- err = os.MkdirAll(path.Join(genDir, "oas", "services"), 0o755) //nolint:gosec // this dir is not sensitive, so we can use 0755
- if err != nil {
- return err
- }
-
- slog.Info(">> Copying OAS files")
- for service, item := range res {
- baseService := strings.TrimSuffix(service, "alpha")
- baseService = strings.TrimSuffix(baseService, "beta")
- itemVersion := fmt.Sprintf("v%d%s", item.major, item.verString)
- if item.minor != 0 {
- itemVersion = itemVersion + "" + strconv.Itoa(item.minor)
- }
- srcFile := path.Join(
- repoDir,
- "services",
- baseService,
- itemVersion,
- fmt.Sprintf("%s.json", baseService),
- )
- dstFile := path.Join(genDir, "oas", "services", fmt.Sprintf("%s.json", service))
- _, err = copyFile(srcFile, dstFile)
- if err != nil {
- return fmt.Errorf("%s", err.Error())
- }
- }
-
- slog.Info("Changing dir", "dir", genDir)
- err = os.Chdir(genDir)
- if err != nil {
- return err
- }
-
- slog.Info("Calling make", "command", "generate-go-sdk")
- cmd := exec.Command("make", "generate-go-sdk")
- var stdOut, stdErr bytes.Buffer
- cmd.Stdout = &stdOut
- cmd.Stderr = &stdErr
-
- if err = cmd.Start(); err != nil {
- slog.Error("cmd.Start", "error", err)
- return err
- }
-
- if err = cmd.Wait(); err != nil {
- var exitErr *exec.ExitError
- if errors.As(err, &exitErr) {
- slog.Error(
- "cmd.Wait",
- "code",
- exitErr.ExitCode(),
- "error",
- err,
- "stdout",
- stdOut.String(),
- "stderr",
- stdErr.String(),
- )
- return fmt.Errorf("%s", stdErr.String())
- }
- if err != nil {
- slog.Error("cmd.Wait", "err", err)
- return err
- }
- }
-
- slog.Info("Cleaning up go.mod and go.sum files")
- cleanDir := path.Join(genDir, "sdk-repo-updated", "services")
- dirEntries, err := os.ReadDir(cleanDir)
- if err != nil {
- return err
- }
- for _, entry := range dirEntries {
- if entry.IsDir() {
- err = deleteFiles(
- path.Join(cleanDir, entry.Name(), "go.mod"),
- path.Join(cleanDir, entry.Name(), "go.sum"),
- )
- if err != nil {
- return err
- }
- }
- }
-
- slog.Info("Changing dir", "dir", *root)
- err = os.Chdir(*root)
- if err != nil {
- return err
- }
-
- slog.Info("Rearranging package directories")
- //nolint:gosec // this dir is not sensitive, so we can use 0755
- err = os.MkdirAll(
- path.Join(*root, "pkg_gen"),
- 0o755,
- )
- if err != nil {
- return err
- }
- srcDir := path.Join(genDir, "sdk-repo-updated", "services")
- items, err := os.ReadDir(srcDir)
- if err != nil {
- return err
- }
- for _, item := range items {
- if !item.IsDir() {
- continue
- }
- slog.Info(" -> package", "name", item.Name())
- tgtDir := path.Join(*root, "pkg_gen", item.Name())
- if fileExists(tgtDir) {
- delErr := os.RemoveAll(tgtDir)
- if delErr != nil {
- return delErr
- }
- }
- err = os.Rename(path.Join(srcDir, item.Name()), tgtDir)
- if err != nil {
- return err
- }
- }
-
- if !b.PackagesOnly {
- slog.Info("Generating service boilerplate")
- err = generateServiceFiles(*root, path.Join(*root, GEN_REPO_NAME))
- if err != nil {
- return err
- }
-
- slog.Info("Copying all service files")
- err = CopyDirectory(
- path.Join(*root, "generated", "internal", "services"),
- path.Join(*root, "stackit", "internal", "services"),
- )
- if err != nil {
- return err
- }
-
- err = createBoilerplate(*root, path.Join(*root, "stackit", "internal", "services"))
- if err != nil {
- return err
- }
- }
-
- if !b.SkipCleanup {
- slog.Info("Finally removing temporary files and directories")
- err = os.RemoveAll(path.Join(*root, "generated"))
- if err != nil {
- slog.Error("RemoveAll", "dir", path.Join(*root, "generated"), "err", err)
- return err
- }
-
- err = os.RemoveAll(path.Join(*root, GEN_REPO_NAME))
- if err != nil {
- slog.Error("RemoveAll", "dir", path.Join(*root, GEN_REPO_NAME), "err", err)
- return err
- }
-
- slog.Info("Cleaning up", "dir", repoDir)
- err = os.RemoveAll(filepath.Dir(repoDir))
- if err != nil {
- return fmt.Errorf("%s", err.Error())
- }
- }
+ //if !b.PackagesOnly {
+ // slog.Info("Generating service boilerplate")
+ // err = generateServiceFiles(*root, path.Join(*root, GEN_REPO_NAME))
+ // if err != nil {
+ // return err
+ // }
+ //
+ // slog.Info("Copying all service files")
+ // err = CopyDirectory(
+ // path.Join(*root, "generated", "internal", "services"),
+ // path.Join(*root, "stackit", "internal", "services"),
+ // )
+ // if err != nil {
+ // return err
+ // }
+ //
+ // err = createBoilerplate(*root, path.Join(*root, "stackit", "internal", "services"))
+ // if err != nil {
+ // return err
+ // }
+ //}
slog.Info("Done")
return nil
@@ -295,17 +116,6 @@ type templateData struct {
Fields []string
}
-func fileExists(pathValue string) bool {
- _, err := os.Stat(pathValue)
- if os.IsNotExist(err) {
- return false
- }
- if err != nil {
- panic(err)
- }
- return true
-}
-
func createBoilerplate(rootFolder, folder string) error {
services, err := os.ReadDir(folder)
if err != nil {
@@ -339,7 +149,7 @@ func createBoilerplate(rootFolder, folder string) error {
"datasources_gen",
fmt.Sprintf("%s_data_source_gen.go", res.Name()),
)
- handleDS = fileExists(dsFile)
+ handleDS = FileExists(dsFile)
resFile := path.Join(
folder,
@@ -348,13 +158,13 @@ func createBoilerplate(rootFolder, folder string) error {
"resources_gen",
fmt.Sprintf("%s_resource_gen.go", res.Name()),
)
- handleRes = fileExists(resFile)
+ handleRes = FileExists(resFile)
dsGoFile := path.Join(folder, svc.Name(), res.Name(), "datasource.go")
- foundDS = fileExists(dsGoFile)
+ foundDS = FileExists(dsGoFile)
resGoFile := path.Join(folder, svc.Name(), res.Name(), "resource.go")
- foundRes = fileExists(resGoFile)
+ foundRes = FileExists(resGoFile)
if handleDS && !foundDS {
slog.Info(" creating missing datasource.go", "service", svc.Name(), "resource", resourceName)
@@ -417,7 +227,7 @@ func createBoilerplate(rootFolder, folder string) error {
return err
}
- if !fileExists(path.Join(folder, svc.Name(), res.Name(), "functions.go")) {
+ if !FileExists(path.Join(folder, svc.Name(), res.Name(), "functions.go")) {
slog.Info(" creating missing functions.go", "service", svc.Name(), "resource", resourceName)
if !ValidateSnakeCase(resourceName) {
return errors.New("resource name is invalid")
@@ -446,393 +256,6 @@ func createBoilerplate(rootFolder, folder string) error {
return nil
}
-func ucfirst(s string) string {
- if s == "" {
- return ""
- }
- return strings.ToUpper(s[:1]) + s[1:]
-}
-
-func writeTemplateToFile(tplName, tplFile, outFile string, data *templateData) error {
- fn := template.FuncMap{
- "ucfirst": ucfirst,
- }
-
- tmpl, err := template.New(tplName).Funcs(fn).ParseFiles(tplFile)
- if err != nil {
- return err
- }
-
- var f *os.File
- f, err = os.Create(outFile)
- if err != nil {
- return err
- }
-
- err = tmpl.Execute(f, *data)
- if err != nil {
- return err
- }
-
- err = f.Close()
- if err != nil {
- return err
- }
- return nil
-}
-
-func generateServiceFiles(rootDir, generatorDir string) error {
- //nolint:gosec // this file is not sensitive, so we can use 0755
- err := os.MkdirAll(path.Join(rootDir, "generated", "specs"), 0o755)
- if err != nil {
- return err
- }
-
- services, err := os.ReadDir(path.Join(rootDir, "service_specs"))
- if err != nil {
- return err
- }
- for _, service := range services {
- if !service.IsDir() {
- continue
- }
-
- versions, err := os.ReadDir(path.Join(rootDir, "service_specs", service.Name()))
- if err != nil {
- return err
- }
- for _, svcVersion := range versions {
- if !svcVersion.IsDir() {
- continue
- }
-
- // TODO: use const of supported versions
- if svcVersion.Name() != "alpha" && svcVersion.Name() != "beta" {
- continue
- }
-
- specFiles, err := os.ReadDir(path.Join(rootDir, "service_specs", service.Name(), svcVersion.Name()))
- if err != nil {
- return err
- }
-
- for _, specFile := range specFiles {
- if specFile.IsDir() {
- continue
- }
-
- r := regexp.MustCompile(`^(.*)_config.yml$`)
- matches := r.FindAllStringSubmatch(specFile.Name(), -1)
- if matches != nil {
- fileName := matches[0][0]
- resource := matches[0][1]
- slog.Info(
- " found service spec",
- "name",
- specFile.Name(),
- "service",
- service.Name(),
- "resource",
- resource,
- )
-
- oasFile := path.Join(
- generatorDir,
- "oas",
- "services",
- fmt.Sprintf("%s%s.json", service.Name(), svcVersion.Name()),
- )
- if _, oasErr := os.Stat(oasFile); os.IsNotExist(oasErr) {
- slog.Warn(
- " could not find matching oas",
- "svc",
- service.Name(),
- "version",
- svcVersion.Name(),
- )
- continue
- }
-
- scName := fmt.Sprintf("%s%s", service.Name(), svcVersion.Name())
- scName = strings.ReplaceAll(scName, "-", "")
- //nolint:gosec // this file is not sensitive, so we can use 0755
- err = os.MkdirAll(path.Join(rootDir, "generated", "internal", "services", scName, resource), 0o755)
- if err != nil {
- return err
- }
-
- specJsonFile := path.Join(
- rootDir,
- "generated",
- "specs",
- fmt.Sprintf("%s_%s_spec.json", scName, resource),
- )
-
- var stdOut, stdErr bytes.Buffer
-
- // nolint:gosec // #nosec this command is not using any untrusted input, so we can ignore gosec warning
- cmd := exec.Command(
- "go",
- "run",
- "github.com/hashicorp/terraform-plugin-codegen-openapi/cmd/tfplugingen-openapi",
- "generate",
- "--config",
- path.Join(rootDir, "service_specs", service.Name(), svcVersion.Name(), fileName),
- "--output",
- specJsonFile,
- oasFile,
- )
- cmd.Stdout = &stdOut
- cmd.Stderr = &stdErr
-
- if err = cmd.Start(); err != nil {
- slog.Error(
- "tfplugingen-openapi generate",
- "error",
- err,
- "stdOut",
- stdOut.String(),
- "stdErr",
- stdErr.String(),
- )
- return err
- }
-
- if err = cmd.Wait(); err != nil {
- var exitErr *exec.ExitError
- if errors.As(err, &exitErr) {
- slog.Error(
- "tfplugingen-openapi generate",
- "code",
- exitErr.ExitCode(),
- "error",
- err,
- "stdout",
- stdOut.String(),
- "stderr",
- stdErr.String(),
- )
- return fmt.Errorf("%s", stdErr.String())
- }
- if err != nil {
- slog.Error(
- "tfplugingen-openapi generate",
- "err",
- err,
- "stdout",
- stdOut.String(),
- "stderr",
- stdErr.String(),
- )
- return err
- }
- }
- if stdOut.Len() > 0 {
- slog.Warn(" command output", "stdout", stdOut.String(), "stderr", stdErr.String())
- }
-
- tgtFolder := path.Join(
- rootDir,
- "generated",
- "internal",
- "services",
- scName,
- resource,
- "resources_gen",
- )
- //nolint:gosec // this file is not sensitive, so we can use 0755
- err = os.MkdirAll(tgtFolder, 0o755)
- if err != nil {
- return err
- }
-
- // nolint:gosec // #nosec this command is not using any untrusted input, so we can ignore gosec warning
- cmd2 := exec.Command(
- "go",
- "run",
- "github.com/hashicorp/terraform-plugin-codegen-framework/cmd/tfplugingen-framework",
- "generate",
- "resources",
- "--input",
- specJsonFile,
- "--output",
- tgtFolder,
- "--package",
- scName,
- )
-
- cmd2.Stdout = &stdOut
- cmd2.Stderr = &stdErr
- if err = cmd2.Start(); err != nil {
- slog.Error("tfplugingen-framework generate resources", "error", err)
- return err
- }
-
- if err = cmd2.Wait(); err != nil {
- var exitErr *exec.ExitError
- if errors.As(err, &exitErr) {
- slog.Error(
- "tfplugingen-framework generate resources",
- "code",
- exitErr.ExitCode(),
- "error",
- err,
- "stdout",
- stdOut.String(),
- "stderr",
- stdErr.String(),
- )
- return fmt.Errorf("%s", stdErr.String())
- }
- if err != nil {
- slog.Error(
- "tfplugingen-framework generate resources",
- "err",
- err,
- "stdout",
- stdOut.String(),
- "stderr",
- stdErr.String(),
- )
- return err
- }
- }
-
- tgtFolder = path.Join(
- rootDir,
- "generated",
- "internal",
- "services",
- scName,
- resource,
- "datasources_gen",
- )
- //nolint:gosec // this directory is not sensitive, so we can use 0755
- err = os.MkdirAll(tgtFolder, 0o755)
- if err != nil {
- return err
- }
-
- // nolint:gosec // #nosec this command is not using any untrusted input, so we can ignore gosec warning
- cmd3 := exec.Command(
- "go",
- "run",
- "github.com/hashicorp/terraform-plugin-codegen-framework/cmd/tfplugingen-framework",
- "generate",
- "data-sources",
- "--input",
- specJsonFile,
- "--output",
- tgtFolder,
- "--package",
- scName,
- )
- var stdOut3, stdErr3 bytes.Buffer
- cmd3.Stdout = &stdOut3
- cmd3.Stderr = &stdErr3
-
- if err = cmd3.Start(); err != nil {
- slog.Error("tfplugingen-framework generate data-sources", "error", err)
- return err
- }
-
- if err = cmd3.Wait(); err != nil {
- var exitErr *exec.ExitError
- if errors.As(err, &exitErr) {
- slog.Error(
- "tfplugingen-framework generate data-sources",
- "code",
- exitErr.ExitCode(),
- "error",
- err,
- "stdout",
- stdOut.String(),
- "stderr",
- stdErr.String(),
- )
- return fmt.Errorf("%s", stdErr.String())
- }
- if err != nil {
- slog.Error(
- "tfplugingen-framework generate data-sources",
- "err",
- err,
- "stdout",
- stdOut.String(),
- "stderr",
- stdErr.String(),
- )
- return err
- }
- }
-
- tfAnoErr := handleTfTagForDatasourceFile(
- path.Join(tgtFolder, fmt.Sprintf("%s_data_source_gen.go", resource)),
- scName,
- resource,
- )
- if tfAnoErr != nil {
- return tfAnoErr
- }
- }
- }
- }
- }
- return nil
-}
-
-// handleTfTagForDatasourceFile replaces existing "id" with "stf_original_api_id"
-func handleTfTagForDatasourceFile(filePath, service, resource string) error {
- slog.Info(" handle terraform tag for datasource", "service", service, "resource", resource)
- if !fileExists(filePath) {
- slog.Warn(" could not find file, skipping", "path", filePath)
- return nil
- }
- f, err := os.Open(filePath)
- if err != nil {
- return err
- }
-
- root, err := getRoot()
- if err != nil {
- //nolint:gocritic // in this case, we want to log the error and exit, as we cannot proceed without the root directory
- log.Fatal(err)
- }
-
- tmp, err := os.CreateTemp(*root, "replace-*")
- if err != nil {
- return err
- }
-
- sc := bufio.NewScanner(f)
- for sc.Scan() {
- resLine, err := handleLine(sc.Text())
- if err != nil {
- return err
- }
- if _, err := tmp.WriteString(resLine + "\n"); err != nil {
- return err
- }
- }
- if scErr := sc.Err(); scErr != nil {
- return scErr
- }
-
- if err := tmp.Close(); err != nil {
- return err
- }
-
- if err := f.Close(); err != nil {
- return err
- }
-
- //nolint:gosec // path traversal is not a concern here
- if err := os.Rename(tmp.Name(), filePath); err != nil {
- log.Fatal(err)
- }
- return nil
-}
-
func handleLine(line string) (string, error) {
schemaRegex := regexp.MustCompile(`(\s+")(id)(": schema.[a-zA-Z0-9]+Attribute{)`)
@@ -850,221 +273,43 @@ func handleLine(line string) (string, error) {
return line, nil
}
-func checkCommands(commands []string) error {
- for _, commandName := range commands {
- if !commandExists(commandName) {
- return fmt.Errorf("missing command %s", commandName)
- }
- slog.Info(" found", "command", commandName)
- }
- return nil
-}
-
-func commandExists(cmd string) bool {
- _, err := exec.LookPath(cmd)
- return err == nil
-}
-
-func deleteFiles(fNames ...string) error {
- for _, fName := range fNames {
- if _, err := os.Stat(fName); !os.IsNotExist(err) {
- err = os.Remove(fName)
- if err != nil {
- return err
- }
- }
- }
- return nil
-}
-
-func copyFile(src, dst string) (int64, error) {
- sourceFileStat, err := os.Stat(src)
- if err != nil {
- return 0, err
- }
-
- if !sourceFileStat.Mode().IsRegular() {
- return 0, fmt.Errorf("%s is not a regular file", src)
- }
-
- source, err := os.Open(src)
- if err != nil {
- return 0, err
- }
- defer func(source *os.File) {
- err := source.Close()
- if err != nil {
- slog.Error("copyFile", "err", err)
- }
- }(source)
-
- destination, err := os.Create(dst)
- if err != nil {
- return 0, err
- }
- defer func(destination *os.File) {
- err := destination.Close()
- if err != nil {
- slog.Error("copyFile", "err", err)
- }
- }(destination)
- nBytes, err := io.Copy(destination, source)
- return nBytes, err
-}
-
-func getOnlyLatest(m map[string]version) (map[string]version, error) {
- tmpMap := make(map[string]version)
- for k, v := range m {
- item, ok := tmpMap[k]
- if !ok {
- tmpMap[k] = v
- } else if item.major == v.major && item.minor < v.minor {
- tmpMap[k] = v
- }
- }
- return tmpMap, nil
-}
-
-func (b *Builder) getVersions(dir string) (map[string]version, error) {
- slog.Info("Retrieving versions from subdirs", "func", "getVersions")
-
- res := make(map[string]version)
- children, err := os.ReadDir(path.Join(dir, "services"))
- if err != nil {
- return nil, err
- }
-
- if len(children) < 1 {
- slog.Error("found no children", "dir", path.Join(dir, "services"))
- }
- for _, entry := range children {
- if !entry.IsDir() {
- slog.Info("entry is no dir", "entry", entry.Name())
- continue
- }
- if b.Verbose {
- slog.Info("getting versions", "svc", entry.Name())
- }
- versions, err := os.ReadDir(path.Join(dir, "services", entry.Name()))
- if err != nil {
- return nil, err
- }
-
- m, err2 := b.extractVersions(entry.Name(), versions)
- if err2 != nil {
- return m, err2
- }
- for k, v := range m {
- res[k] = v
- }
- }
- return res, nil
-}
-
-func (b *Builder) extractVersions(service string, versionDirs []os.DirEntry) (map[string]version, error) {
- res := make(map[string]version)
- if len(versionDirs) < 1 {
- slog.Error("list of version directories is empty")
- return nil, nil
- }
- for _, vDir := range versionDirs {
- if !vDir.IsDir() {
- continue
- }
- r := regexp.MustCompile(`v(\d+)([a-z]+)(\d*)`)
- matches := r.FindAllStringSubmatch(vDir.Name(), -1)
- if matches == nil {
- if b.Debug {
- slog.Warn("item did not fulfill regex", "item", vDir.Name())
- }
- continue
- }
- svc, ver, err := handleVersion(service, matches[0])
- if err != nil {
- return nil, err
- }
-
- if svc != nil && ver != nil {
- res[*svc] = *ver
- }
- }
- return res, nil
-}
-
-func handleVersion(service string, match []string) (*string, *version, error) {
- if match == nil {
- fmt.Println("no matches")
- return nil, nil, nil
- }
- verString := match[2]
- if verString != "alpha" && verString != "beta" {
- return nil, nil, errors.New("unsupported version")
- }
- majVer, err := strconv.Atoi(match[1])
- if err != nil {
- return nil, nil, err
- }
- if match[3] == "" {
- match[3] = "0"
- }
- minVer, err := strconv.Atoi(match[3])
- if err != nil {
- return nil, nil, err
- }
- resStr := fmt.Sprintf("%s%s", service, verString)
- return &resStr, &version{verString: verString, major: majVer, minor: minVer}, nil
-}
-
-func (b *Builder) createRepoDir(root, repoUrl, repoName string, skipClone bool) (string, error) {
- targetDir := path.Join(root, repoName)
- if !skipClone {
- if fileExists(targetDir) {
- slog.Warn("target dir exists - skipping", "targetDir", targetDir)
- return targetDir, nil
- }
- out, err := git.Clone(
- clone.Repository(repoUrl),
- clone.Directory(targetDir),
- )
- if err != nil {
- return "", err
- }
- if b.Verbose {
- slog.Info("git clone result", "output", out)
- }
- }
- return targetDir, nil
-}
-
-func createGeneratorDir(repoUrl, targetDir string, skipClone bool) error {
- if !skipClone {
- if fileExists(targetDir) {
- remErr := os.RemoveAll(targetDir)
- if remErr != nil {
- return remErr
- }
- }
- _, cloneErr := git.Clone(
- clone.Repository(repoUrl),
- clone.Directory(targetDir),
- )
- if cloneErr != nil {
- return cloneErr
- }
- }
- return nil
-}
-
-func getRoot() (*string, error) {
+func (b *Builder) determineRoot() error {
cmd := exec.Command("git", "rev-parse", "--show-toplevel")
out, err := cmd.Output()
if err != nil {
- return nil, err
+ return err
}
lines := strings.Split(string(out), "\n")
- return &lines[0], nil
+ if lines[0] == "" {
+ return fmt.Errorf("unable to determine root directory from git")
+ }
+ b.rootDir = lines[0]
+ if b.Verbose {
+ slog.Info(" ... using root", "dir", b.rootDir)
+ }
+
+ return nil
}
+//func createGeneratorDir(repoUrl, targetDir string, skipClone bool) error {
+// if !skipClone {
+// if FileExists(targetDir) {
+// remErr := os.RemoveAll(targetDir)
+// if remErr != nil {
+// return remErr
+// }
+// }
+// _, cloneErr := git.Clone(
+// clone.Repository(repoUrl),
+// clone.Directory(targetDir),
+// )
+// if cloneErr != nil {
+// return cloneErr
+// }
+// }
+// return nil
+//}
+
func getTokens(fileName string) ([]string, error) {
fset := token.NewFileSet()
diff --git a/generator/cmd/build/functions.go b/generator/cmd/build/functions.go
new file mode 100644
index 00000000..25c1c2d6
--- /dev/null
+++ b/generator/cmd/build/functions.go
@@ -0,0 +1,119 @@
+package build
+
+import (
+ "fmt"
+ "io"
+ "log/slog"
+ "os"
+ "os/exec"
+ "strings"
+ "text/template"
+)
+
+func FileExists(pathValue string) bool {
+ _, err := os.Stat(pathValue)
+ if os.IsNotExist(err) {
+ return false
+ }
+ if err != nil {
+ panic(err)
+ }
+ return true
+}
+
+func ucfirst(s string) string {
+ if s == "" {
+ return ""
+ }
+ return strings.ToUpper(s[:1]) + s[1:]
+}
+
+func writeTemplateToFile(tplName, tplFile, outFile string, data *templateData) error {
+ fn := template.FuncMap{
+ "ucfirst": ucfirst,
+ }
+
+ tmpl, err := template.New(tplName).Funcs(fn).ParseFiles(tplFile)
+ if err != nil {
+ return err
+ }
+
+ var f *os.File
+ f, err = os.Create(outFile)
+ if err != nil {
+ return err
+ }
+
+ err = tmpl.Execute(f, *data)
+ if err != nil {
+ return err
+ }
+
+ err = f.Close()
+ if err != nil {
+ return err
+ }
+ return nil
+}
+
+func deleteFiles(fNames ...string) error {
+ for _, fName := range fNames {
+ if _, err := os.Stat(fName); !os.IsNotExist(err) {
+ err = os.Remove(fName)
+ if err != nil {
+ return err
+ }
+ }
+ }
+ return nil
+}
+
+func copyFile(src, dst string) (int64, error) {
+ sourceFileStat, err := os.Stat(src)
+ if err != nil {
+ return 0, err
+ }
+
+ if !sourceFileStat.Mode().IsRegular() {
+ return 0, fmt.Errorf("%s is not a regular file", src)
+ }
+
+ source, err := os.Open(src)
+ if err != nil {
+ return 0, err
+ }
+ defer func(source *os.File) {
+ err := source.Close()
+ if err != nil {
+ slog.Error("copyFile", "err", err)
+ }
+ }(source)
+
+ destination, err := os.Create(dst)
+ if err != nil {
+ return 0, err
+ }
+ defer func(destination *os.File) {
+ err := destination.Close()
+ if err != nil {
+ slog.Error("copyFile", "err", err)
+ }
+ }(destination)
+ nBytes, err := io.Copy(destination, source)
+ return nBytes, err
+}
+
+func checkCommands(commands []string) error {
+ for _, commandName := range commands {
+ if !commandExists(commandName) {
+ return fmt.Errorf("missing command %s", commandName)
+ }
+ slog.Info(" found", "command", commandName)
+ }
+ return nil
+}
+
+func commandExists(cmd string) bool {
+ _, err := exec.LookPath(cmd)
+ return err == nil
+}
diff --git a/generator/cmd/build/oas-handler.go b/generator/cmd/build/oas-handler.go
new file mode 100644
index 00000000..d4ab5c4a
--- /dev/null
+++ b/generator/cmd/build/oas-handler.go
@@ -0,0 +1,446 @@
+package build
+
+import (
+ "bufio"
+ "bytes"
+ "errors"
+ "fmt"
+ "log"
+ "log/slog"
+ "os"
+ "os/exec"
+ "path"
+ "regexp"
+ "strings"
+
+ "gopkg.in/yaml.v3"
+
+ "github.com/ldez/go-git-cmd-wrapper/v2/clone"
+ "github.com/ldez/go-git-cmd-wrapper/v2/git"
+)
+
+const (
+ OasRepoName = "stackit-api-specifications"
+ OasRepo = "https://github.com/stackitcloud/stackit-api-specifications.git"
+
+ ResTypeResource = "resources"
+ ResTypeDataSource = "datasources"
+)
+
+type Data struct {
+ ServiceName string `yaml:",omitempty" json:",omitempty"`
+ Versions []Version `yaml:"versions" json:"versions"`
+}
+
+type Version struct {
+ Name string `yaml:"name" json:"name"`
+ Path string `yaml:"path" json:"path"`
+}
+
+var oasTempDir string
+
+func (b *Builder) oasHandler(specDir string) error {
+ if b.Verbose {
+ slog.Info("creating schema files", "dir", specDir)
+ }
+ if _, err := os.Stat(specDir); os.IsNotExist(err) {
+ return fmt.Errorf("spec files directory does not exist")
+ }
+
+ err := b.createRepoDir(b.SkipClone)
+ if err != nil {
+ return fmt.Errorf("%s", err.Error())
+ }
+
+ err2 := b.handleServices(specDir)
+ if err2 != nil {
+ return err2
+ }
+
+ if !b.SkipCleanup {
+ if b.Verbose {
+ slog.Info("Finally removing temporary files and directories")
+ }
+ err := os.RemoveAll(path.Join(b.rootDir, "generated"))
+ if err != nil {
+ slog.Error("RemoveAll", "dir", path.Join(b.rootDir, "generated"), "err", err)
+ return err
+ }
+
+ err = os.RemoveAll(oasTempDir)
+ if err != nil {
+ slog.Error("RemoveAll", "dir", oasTempDir, "err", err)
+ return err
+ }
+ }
+
+ return nil
+}
+
+func (b *Builder) handleServices(specDir string) error {
+ services, err := os.ReadDir(specDir)
+ if err != nil {
+ return err
+ }
+
+ for _, svc := range services {
+ if !svc.IsDir() {
+ continue
+ }
+
+ if b.Verbose {
+ slog.Info(" ... found", "service", svc.Name())
+ }
+ var svcVersions Data
+ svcVersions.ServiceName = svc.Name()
+
+ versionsErr := b.getServiceVersions(path.Join(specDir, svc.Name(), "generator_settings.yml"), &svcVersions)
+ if versionsErr != nil {
+ return versionsErr
+ }
+
+ oasSpecErr := b.generateServiceFiles(&svcVersions)
+ if oasSpecErr != nil {
+ return oasSpecErr
+ }
+ }
+ return nil
+}
+
+func (b *Builder) getServiceVersions(confFile string, data *Data) error {
+ if _, cfgFileErr := os.Stat(confFile); os.IsNotExist(cfgFileErr) {
+ return fmt.Errorf("config file does not exist")
+ }
+
+ fileContent, fileErr := os.ReadFile(confFile)
+ if fileErr != nil {
+ return fileErr
+ }
+ convErr := yaml.Unmarshal(fileContent, &data)
+ if convErr != nil {
+ return convErr
+ }
+
+ return nil
+}
+
+func (b *Builder) createRepoDir(skipClone bool) error {
+ tmpDirName, err := os.MkdirTemp("", "oasbuild")
+ if err != nil {
+ return err
+ }
+ oasTempDir = path.Join(tmpDirName, OasRepoName)
+ slog.Info("Creating oas repo dir", "dir", oasTempDir)
+ if !skipClone {
+ if FileExists(oasTempDir) {
+ slog.Warn("target dir exists - skipping", "targetDir", oasTempDir)
+ return nil
+ }
+ out, cloneErr := git.Clone(
+ clone.Repository(OasRepo),
+ clone.Directory(oasTempDir),
+ )
+ if cloneErr != nil {
+ slog.Error("git clone error", "output", out)
+ return cloneErr
+ }
+ if b.Verbose {
+ slog.Info("git clone result", "output", out)
+ }
+ }
+ return nil
+}
+
+func (b *Builder) generateServiceFiles(data *Data) error {
+ err := os.MkdirAll(path.Join(b.rootDir, "generated", "specs"), 0o750)
+ if err != nil {
+ return err
+ }
+
+ for _, v := range data.Versions {
+ specFiles, specsErr := os.ReadDir(path.Join(b.rootDir, "service_specs", data.ServiceName, v.Name))
+ if specsErr != nil {
+ return specsErr
+ }
+ for _, specFile := range specFiles {
+ if specFile.IsDir() {
+ continue
+ }
+ r := regexp.MustCompile(`^(.*)_config.yml$`)
+ matches := r.FindAllStringSubmatch(specFile.Name(), -1)
+ if matches == nil {
+ slog.Warn(" skipping file (no regex match)", "file", specFile.Name())
+ continue
+ }
+
+ srcSpecFile := path.Join(b.rootDir, "service_specs", data.ServiceName, v.Name, specFile.Name())
+
+ if matches[0][0] != specFile.Name() {
+ return fmt.Errorf("matched filename differs from original filename - this should not happen")
+ }
+ resource := matches[0][1]
+ if b.Verbose {
+ slog.Info(
+ " found service spec",
+ "service",
+ data.ServiceName,
+ "resource",
+ resource,
+ "file",
+ specFile.Name(),
+ )
+ }
+
+ oasFile := path.Join(
+ oasTempDir,
+ "services",
+ data.ServiceName,
+ v.Path,
+ fmt.Sprintf("%s.json", data.ServiceName),
+ )
+ if _, oasErr := os.Stat(oasFile); os.IsNotExist(oasErr) {
+ slog.Warn(
+ " could not find matching oas",
+ "svc",
+ data.ServiceName,
+ "version",
+ v.Name,
+ )
+ continue
+ }
+
+ // determine correct target service name
+ scName := fmt.Sprintf("%s%s", data.ServiceName, v.Name)
+ scName = strings.ReplaceAll(scName, "-", "")
+
+ specJSONFile := path.Join(
+ b.rootDir,
+ "generated",
+ "specs",
+ fmt.Sprintf("%s_%s_spec.json", scName, resource),
+ )
+
+ cmdErr := b.runTerraformPluginGenOpenAPI(srcSpecFile, specJSONFile, oasFile)
+ if cmdErr != nil {
+ return cmdErr
+ }
+
+ cmdResGenErr := b.runTerraformPluginGenFramework(ResTypeResource, scName, resource, specJSONFile)
+ if cmdResGenErr != nil {
+ return cmdResGenErr
+ }
+
+ cmdDsGenErr := b.runTerraformPluginGenFramework(ResTypeDataSource, scName, resource, specJSONFile)
+ if cmdDsGenErr != nil {
+ return cmdDsGenErr
+ }
+ }
+ }
+
+ return nil
+}
+
+func (b *Builder) runTerraformPluginGenFramework(resType, svcName, resource, specJSONFile string) error {
+ var stdOut, stdErr bytes.Buffer
+ tgtFolder := path.Join(
+ b.rootDir,
+ "stackit",
+ "internal",
+ "services",
+ svcName,
+ resource,
+ fmt.Sprintf("%s_gen", resType),
+ )
+
+	//nolint:gosec // this directory is not sensitive, so we can use 0755
+ err := os.MkdirAll(tgtFolder, 0o755)
+ if err != nil {
+ return err
+ }
+
+ var subCmd string
+ switch resType {
+ case ResTypeResource:
+ subCmd = "resources"
+ case ResTypeDataSource:
+ subCmd = "data-sources"
+ default:
+ return fmt.Errorf("unknown resource type given: %s", resType)
+ }
+
+ // nolint:gosec // #nosec this command is not using any untrusted input, so we can ignore gosec warning
+ cmd := exec.Command(
+ "tfplugingen-framework",
+ "generate",
+ subCmd,
+ "--input",
+ specJSONFile,
+ "--output",
+ tgtFolder,
+ "--package",
+ svcName,
+ )
+
+ cmd.Stdout = &stdOut
+ cmd.Stderr = &stdErr
+ if err = cmd.Start(); err != nil {
+ slog.Error(fmt.Sprintf("tfplugingen-framework generate %s", resType), "error", err)
+ return err
+ }
+
+ if err = cmd.Wait(); err != nil {
+ var exitErr *exec.ExitError
+ if errors.As(err, &exitErr) {
+ slog.Error(
+ fmt.Sprintf("tfplugingen-framework generate %s", resType),
+ "code",
+ exitErr.ExitCode(),
+ "error",
+ err,
+ "stdout",
+ stdOut.String(),
+ "stderr",
+ stdErr.String(),
+ )
+ return fmt.Errorf("%s", stdErr.String())
+ }
+ if err != nil {
+ slog.Error(
+ fmt.Sprintf("tfplugingen-framework generate %s", resType),
+ "err",
+ err,
+ "stdout",
+ stdOut.String(),
+ "stderr",
+ stdErr.String(),
+ )
+ return err
+ }
+ }
+
+ if resType == ResTypeDataSource {
+ tfAnoErr := b.handleTfTagForDatasourceFile(
+ path.Join(tgtFolder, fmt.Sprintf("%s_data_source_gen.go", resource)),
+ svcName,
+ resource,
+ )
+ if tfAnoErr != nil {
+ return tfAnoErr
+ }
+ }
+
+ return nil
+}
+
+func (b *Builder) runTerraformPluginGenOpenAPI(srcSpecFile, specJSONFile, oasFile string) error {
+ var stdOut, stdErr bytes.Buffer
+
+ // nolint:gosec // #nosec this command is not using any untrusted input, so we can ignore gosec warning
+ cmd := exec.Command(
+ "tfplugingen-openapi",
+ "generate",
+ "--config",
+ srcSpecFile,
+ "--output",
+ specJSONFile,
+ oasFile,
+ )
+ cmd.Stdout = &stdOut
+ cmd.Stderr = &stdErr
+
+ if err := cmd.Start(); err != nil {
+ slog.Error(
+ "tfplugingen-openapi generate",
+ "error",
+ err,
+ "stdOut",
+ stdOut.String(),
+ "stdErr",
+ stdErr.String(),
+ )
+ return err
+ }
+
+ if err := cmd.Wait(); err != nil {
+ var exitErr *exec.ExitError
+ if errors.As(err, &exitErr) {
+ slog.Error(
+ "tfplugingen-openapi generate",
+ "code",
+ exitErr.ExitCode(),
+ "error",
+ err,
+ "stdout",
+ stdOut.String(),
+ "stderr",
+ stdErr.String(),
+ )
+ return fmt.Errorf("%s", stdErr.String())
+ }
+ if err != nil {
+ slog.Error(
+ "tfplugingen-openapi generate",
+ "err",
+ err,
+ "stdout",
+ stdOut.String(),
+ "stderr",
+ stdErr.String(),
+ )
+ return err
+ }
+ }
+ if stdOut.Len() > 0 {
+ slog.Warn(" command output", "stdout", stdOut.String(), "stderr", stdErr.String())
+ }
+
+ return nil
+}
+
+// handleTfTagForDatasourceFile replaces existing "id" with "tf_original_api_id"
+func (b *Builder) handleTfTagForDatasourceFile(filePath, service, resource string) error {
+ if b.Verbose {
+ slog.Info(" handle terraform tag for datasource", "service", service, "resource", resource)
+ }
+ if !FileExists(filePath) {
+ slog.Warn(" could not find file, skipping", "path", filePath)
+ return nil
+ }
+ f, err := os.Open(filePath)
+ if err != nil {
+ return err
+ }
+
+ tmp, err := os.CreateTemp(b.rootDir, "replace-*")
+ if err != nil {
+ return err
+ }
+
+ sc := bufio.NewScanner(f)
+ for sc.Scan() {
+ resLine, err := handleLine(sc.Text())
+ if err != nil {
+ return err
+ }
+ if _, err := tmp.WriteString(resLine + "\n"); err != nil {
+ return err
+ }
+ }
+ if scErr := sc.Err(); scErr != nil {
+ return scErr
+ }
+
+ if err := tmp.Close(); err != nil {
+ return err
+ }
+
+ if err := f.Close(); err != nil {
+ return err
+ }
+
+ //nolint:gosec // path traversal is not a concern here
+ if err := os.Rename(tmp.Name(), filePath); err != nil {
+ log.Fatal(err)
+ }
+ return nil
+}
diff --git a/go.mod b/go.mod
index d7cfc944..5b1a97df 100644
--- a/go.mod
+++ b/go.mod
@@ -2,20 +2,30 @@ module tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stac
go 1.25.6
+replace (
+ github.com/stackitcloud/stackit-sdk-go => ../stackit-sdk-generator/sdk-repo-updated
+ github.com/stackitcloud/stackit-sdk-go/services/postgresflex => ../stackit-sdk-generator/sdk-repo-updated/services/postgresflex
+)
+
require (
+ github.com/SladkyCitron/slogcolor v1.8.0
github.com/golang-jwt/jwt/v5 v5.3.1
github.com/google/go-cmp v0.7.0
github.com/google/uuid v1.6.0
+ github.com/hashicorp/terraform-plugin-codegen-openapi v0.3.0
github.com/hashicorp/terraform-plugin-framework v1.18.0
github.com/hashicorp/terraform-plugin-framework-validators v0.19.0
github.com/hashicorp/terraform-plugin-go v0.30.0
github.com/hashicorp/terraform-plugin-log v0.10.0
github.com/hashicorp/terraform-plugin-testing v1.14.0
github.com/iancoleman/strcase v0.3.0
+ github.com/ivanpirog/coloredcobra v1.0.1
github.com/jarcoal/httpmock v1.4.1
github.com/joho/godotenv v1.5.1
+ github.com/ldez/go-git-cmd-wrapper/v2 v2.9.1
+ github.com/spf13/cobra v1.4.0
github.com/stackitcloud/stackit-sdk-go/core v0.22.0
- github.com/stackitcloud/stackit-sdk-go/services/iaasalpha v0.1.23-alpha
+ github.com/stackitcloud/stackit-sdk-go/services/postgresflex v0.0.0-00010101000000-000000000000
github.com/teambition/rrule-go v1.8.2
gopkg.in/yaml.v3 v3.0.1
)
@@ -25,10 +35,9 @@ require github.com/hashicorp/go-retryablehttp v0.7.8 // indirect
require (
dario.cat/mergo v1.0.1 // indirect
github.com/Masterminds/goutils v1.1.1 // indirect
- github.com/Masterminds/semver/v3 v3.2.1 // indirect
+ github.com/Masterminds/semver/v3 v3.2.0 // indirect
github.com/Masterminds/sprig/v3 v3.2.3 // indirect
github.com/ProtonMail/go-crypto v1.3.0 // indirect
- github.com/SladkyCitron/slogcolor v1.8.0 // indirect
github.com/agext/levenshtein v1.2.3 // indirect
github.com/apparentlymart/go-textseg/v15 v15.0.0 // indirect
github.com/armon/go-radix v1.0.0 // indirect
@@ -54,19 +63,14 @@ require (
github.com/hashicorp/logutils v1.0.0 // indirect
github.com/hashicorp/terraform-exec v0.25.0 // indirect
github.com/hashicorp/terraform-json v0.27.2 // indirect
- github.com/hashicorp/terraform-plugin-codegen-framework v0.4.1 // indirect
- github.com/hashicorp/terraform-plugin-codegen-openapi v0.3.0 // indirect
- github.com/hashicorp/terraform-plugin-codegen-spec v0.2.0 // indirect
+ github.com/hashicorp/terraform-plugin-codegen-spec v0.1.0 // indirect
github.com/hashicorp/terraform-plugin-sdk/v2 v2.38.2 // indirect
github.com/hashicorp/terraform-registry-address v0.4.0 // indirect
github.com/hashicorp/terraform-svchost v0.2.0 // indirect
github.com/hashicorp/yamux v0.1.2 // indirect
github.com/huandu/xstrings v1.4.0 // indirect
- github.com/imdario/mergo v0.3.16 // indirect
+ github.com/imdario/mergo v0.3.15 // indirect
github.com/inconshreveable/mousetrap v1.0.0 // indirect
- github.com/ivanpirog/coloredcobra v1.0.1 // indirect
- github.com/kr/text v0.2.0 // indirect
- github.com/ldez/go-git-cmd-wrapper/v2 v2.9.1 // indirect
github.com/mailru/easyjson v0.7.7 // indirect
github.com/mattn/go-colorable v0.1.14 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect
@@ -79,8 +83,7 @@ require (
github.com/pb33f/libopenapi v0.15.0 // indirect
github.com/posener/complete v1.2.3 // indirect
github.com/shopspring/decimal v1.3.1 // indirect
- github.com/spf13/cast v1.5.1 // indirect
- github.com/spf13/cobra v1.4.0 // indirect
+ github.com/spf13/cast v1.5.0 // indirect
github.com/spf13/pflag v1.0.5 // indirect
github.com/stretchr/testify v1.11.1 // indirect
github.com/vmihailenco/msgpack v4.0.4+incompatible // indirect
@@ -88,7 +91,7 @@ require (
github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect
github.com/vmware-labs/yaml-jsonpath v0.3.2 // indirect
github.com/wk8/go-ordered-map/v2 v2.1.8 // indirect
- github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb // indirect
+ github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f // indirect
github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 // indirect
github.com/xeipuuv/gojsonschema v1.2.0 // indirect
github.com/zclconf/go-cty v1.17.0 // indirect
diff --git a/go.sum b/go.sum
index 0fb47aa2..146ff76d 100644
--- a/go.sum
+++ b/go.sum
@@ -4,8 +4,6 @@ github.com/Masterminds/goutils v1.1.1 h1:5nUrii3FMTL5diU80unEVvNevw1nH4+ZV4DSLVJ
github.com/Masterminds/goutils v1.1.1/go.mod h1:8cTjp+g8YejhMuvIA5y2vz3BpJxksy863GQaJW2MFNU=
github.com/Masterminds/semver/v3 v3.2.0 h1:3MEsd0SM6jqZojhjLWWeBY+Kcjy9i6MQAeY7YgDP83g=
github.com/Masterminds/semver/v3 v3.2.0/go.mod h1:qvl/7zhW3nngYb5+80sSMF+FG2BjYrf8m9wsX0PNOMQ=
-github.com/Masterminds/semver/v3 v3.2.1 h1:RN9w6+7QoMeJVGyfmbcgs28Br8cvmnucEXnY0rYXWg0=
-github.com/Masterminds/semver/v3 v3.2.1/go.mod h1:qvl/7zhW3nngYb5+80sSMF+FG2BjYrf8m9wsX0PNOMQ=
github.com/Masterminds/sprig/v3 v3.2.3 h1:eL2fZNezLomi0uOLqjQoN6BfsDD+fyLtgbJMAj9n6YA=
github.com/Masterminds/sprig/v3 v3.2.3/go.mod h1:rXcFaZ2zZbLRJv/xSysmlgIM1u11eBaRMhvYXJNkGuM=
github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY=
@@ -37,7 +35,6 @@ github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMn
github.com/cloudflare/circl v1.6.3 h1:9GPOhQGF9MCYUeXyMYlqTR6a5gTrgR/fBLXvUgtVcg8=
github.com/cloudflare/circl v1.6.3/go.mod h1:2eXP6Qfat4O/Yhh8BznvKnJ+uzEoTQ6jVKJRn81BiS4=
github.com/cpuguy83/go-md2man/v2 v2.0.1/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
-github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
github.com/cyphar/filepath-securejoin v0.4.1 h1:JyxxyPEaktOD+GAnqIqTf9A8tHyAG22rowi7HkoSU1s=
github.com/cyphar/filepath-securejoin v0.4.1/go.mod h1:Sdj7gXlvMcPZsbhwhQ33GguGLDGQL7h7bg04C/+u9jI=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
@@ -52,7 +49,10 @@ github.com/emirpasic/gods v1.18.1/go.mod h1:8tpGGwCnJ5H4r6BWwaV6OrWmMoPhUl5jm/FM
github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk=
github.com/fatih/color v1.18.0 h1:S8gINlzdQ840/4pfAwic/ZE0djQEH3wM94VfqLTZcOM=
github.com/fatih/color v1.18.0/go.mod h1:4FelSpRwEGDpQ12mAdzqdOukCy4u8WUtOY6lkT/6HfU=
+github.com/frankban/quicktest v1.14.3 h1:FJKSZTDHjyhriyC81FLQ0LY93eSai0ZyR/ZIkd3ZUKE=
+github.com/frankban/quicktest v1.14.3/go.mod h1:mgiwOwqx65TmIk1wJ6Q7wvnVMocbUorkibMOrVTHZps=
github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
+github.com/fsnotify/fsnotify v1.4.9 h1:hsms1Qyu0jgnwNXIxa+/V/PDsU6CfLf6CNO8H7IWoS4=
github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ=
github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 h1:+zs/tPmkDkHx3U66DAb0lQFJrpS6731Oaa12ikc+DiI=
github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376/go.mod h1:an3vInlBmSxCcxctByoQdvwPiA7DTK7jaaFDBTtu0ic=
@@ -129,14 +129,10 @@ github.com/hashicorp/terraform-exec v0.25.0 h1:Bkt6m3VkJqYh+laFMrWIpy9KHYFITpOyz
github.com/hashicorp/terraform-exec v0.25.0/go.mod h1:dl9IwsCfklDU6I4wq9/StFDp7dNbH/h5AnfS1RmiUl8=
github.com/hashicorp/terraform-json v0.27.2 h1:BwGuzM6iUPqf9JYM/Z4AF1OJ5VVJEEzoKST/tRDBJKU=
github.com/hashicorp/terraform-json v0.27.2/go.mod h1:GzPLJ1PLdUG5xL6xn1OXWIjteQRT2CNT9o/6A9mi9hE=
-github.com/hashicorp/terraform-plugin-codegen-framework v0.4.1 h1:eaI/3dsu2T5QAXbA+7N+B+UBj20GdtYnsRuYypKh3S4=
-github.com/hashicorp/terraform-plugin-codegen-framework v0.4.1/go.mod h1:kpYM23L7NtcfaQdWAN0QFkV/lU0w16qJ2ddAPCI4zAg=
github.com/hashicorp/terraform-plugin-codegen-openapi v0.3.0 h1:IKpc337XKk50QyQPSxLrHwdqSo1E2XqCMxFkWsZcTvc=
github.com/hashicorp/terraform-plugin-codegen-openapi v0.3.0/go.mod h1:tT6wl80h7nsMBw+1yZRgJXi+Ys85PUai11weDqysvp4=
github.com/hashicorp/terraform-plugin-codegen-spec v0.1.0 h1:flL5dprli2h54RxewQi6po02am0zXDRq6nsV6c4WQ/I=
github.com/hashicorp/terraform-plugin-codegen-spec v0.1.0/go.mod h1:PQn6bDD8UWoAVJoHXqFk2i/RmLbeQBjbiP38i+E+YIw=
-github.com/hashicorp/terraform-plugin-codegen-spec v0.2.0 h1:91dQG1A/DxP6vRz9GiytDTrZTXDbhHPvmpYnAyWA/Vw=
-github.com/hashicorp/terraform-plugin-codegen-spec v0.2.0/go.mod h1:fywrEKpordQypmAjz/HIfm2LuNVmyJ6KDe8XT9GdJxQ=
github.com/hashicorp/terraform-plugin-framework v1.18.0 h1:Xy6OfqSTZfAAKXSlJ810lYvuQvYkOpSUoNMQ9l2L1RA=
github.com/hashicorp/terraform-plugin-framework v1.18.0/go.mod h1:eeFIf68PME+kenJeqSrIcpHhYQK0TOyv7ocKdN4Z35E=
github.com/hashicorp/terraform-plugin-framework-validators v0.19.0 h1:Zz3iGgzxe/1XBkooZCewS0nJAaCFPFPHdNJd8FgE4Ow=
@@ -165,8 +161,6 @@ github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:
github.com/imdario/mergo v0.3.11/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA=
github.com/imdario/mergo v0.3.15 h1:M8XP7IuFNsqUx6VPK2P9OSmsYsI/YFaGil0uD21V3dM=
github.com/imdario/mergo v0.3.15/go.mod h1:WBLT9ZmE3lPoWsEzCh9LPo3TiwVN+ZKEjmz+hD27ysY=
-github.com/imdario/mergo v0.3.16 h1:wwQJbIsHYGMUyLSPrEq1CT16AhnhNJQ51+4fdHUnCl4=
-github.com/imdario/mergo v0.3.16/go.mod h1:WBLT9ZmE3lPoWsEzCh9LPo3TiwVN+ZKEjmz+hD27ysY=
github.com/inconshreveable/mousetrap v1.0.0 h1:Z8tu5sraLXCXIcARxBp/8cbvlwVa7Z1NHg9XEKhtSvM=
github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8=
github.com/ivanpirog/coloredcobra v1.0.1 h1:aURSdEmlR90/tSiWS0dMjdwOvCVUeYLfltLfbgNxrN4=
@@ -216,18 +210,21 @@ github.com/mitchellh/reflectwalk v1.0.0/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx
github.com/mitchellh/reflectwalk v1.0.2 h1:G2LzWKi524PWgd3mLHV8Y5k7s6XUvT0Gef6zxSIeXaQ=
github.com/mitchellh/reflectwalk v1.0.2/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw=
github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A=
+github.com/nxadm/tail v1.4.8 h1:nPr65rt6Y5JFSKQO7qToXr7pePgD6Gwiw05lkbyAQTE=
github.com/nxadm/tail v1.4.8/go.mod h1:+ncqLTQzXmGhMZNUePPaPqPvBxHAIsmXswZKocGu+AU=
github.com/oklog/run v1.2.0 h1:O8x3yXwah4A73hJdlrwo/2X6J62gE5qTMusH0dvz60E=
github.com/oklog/run v1.2.0/go.mod h1:mgDbKRSwPhJfesJ4PntqFUbKQRZ50NgmZTSPlFA0YFk=
github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
github.com/onsi/ginkgo v1.10.2/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
github.com/onsi/ginkgo v1.12.1/go.mod h1:zj2OWP4+oCPe1qIXoGWkgMRwljMUYCdkwsT2108oapk=
+github.com/onsi/ginkgo v1.16.4 h1:29JGrr5oVBm5ulCWet69zQkzWipVXIol6ygQUe/EzNc=
github.com/onsi/ginkgo v1.16.4/go.mod h1:dX+/inL/fNMqNlz0e9LfyB9TswhZpCVdJM/Z6Vvnwo0=
github.com/onsi/ginkgo/v2 v2.1.3/go.mod h1:vw5CSIxN1JObi/U8gcbwft7ZxR2dgaR70JSE3/PpL4c=
github.com/onsi/gomega v1.7.0/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY=
github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY=
github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo=
github.com/onsi/gomega v1.17.0/go.mod h1:HnhC7FXeEQY45zxNK3PPoIUhzk/80Xly9PcubAlGdZY=
+github.com/onsi/gomega v1.19.0 h1:4ieX6qQjPP/BfC3mpsAtIGGlxTWPeA3Inl/7DtXw1tw=
github.com/onsi/gomega v1.19.0/go.mod h1:LY+I3pBVzYsTBU1AnDwOSxaYi9WoWiqgwooUqq9yPro=
github.com/pb33f/libopenapi v0.15.0 h1:AoBYIY3HXqDDF8O9kcudlqWaRFZZJmgtueE649oHzIw=
github.com/pb33f/libopenapi v0.15.0/go.mod h1:m+4Pwri31UvcnZjuP8M7TlbR906DXJmMvYsbis234xg=
@@ -252,16 +249,12 @@ github.com/skeema/knownhosts v1.3.1/go.mod h1:r7KTdC8l4uxWRyK2TpQZ/1o5HaSzh06ePQ
github.com/spf13/cast v1.3.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE=
github.com/spf13/cast v1.5.0 h1:rj3WzYc11XZaIZMPKmwP96zkFEnnAmV8s6XbB2aY32w=
github.com/spf13/cast v1.5.0/go.mod h1:SpXXQ5YoyJw6s3/6cMTQuxvgRl3PCJiyaX9p6b155UU=
-github.com/spf13/cast v1.5.1 h1:R+kOtfhWQE6TVQzY+4D7wJLBgkdVasCEFxSUBYBYIlA=
-github.com/spf13/cast v1.5.1/go.mod h1:b9PdjNptOpzXr7Rq1q9gJML/2cdGQAo69NKzQ10KN48=
github.com/spf13/cobra v1.4.0 h1:y+wJpx64xcgO1V+RcnwW0LEHxTKRi2ZDPSBjWnrg88Q=
github.com/spf13/cobra v1.4.0/go.mod h1:Wo4iy3BUC+X2Fybo0PDqwJIv3dNRiZLHQymsfxlB84g=
github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
github.com/stackitcloud/stackit-sdk-go/core v0.22.0 h1:6rViz7GnNwXSh51Lur5xuDzO8EWSZfN9J0HvEkBKq6c=
github.com/stackitcloud/stackit-sdk-go/core v0.22.0/go.mod h1:osMglDby4csGZ5sIfhNyYq1bS1TxIdPY88+skE/kkmI=
-github.com/stackitcloud/stackit-sdk-go/services/iaasalpha v0.1.23-alpha h1:ugpMOMUZGB0yXsWcfe97F7GCdjlexbjFuGD8ZeyMSts=
-github.com/stackitcloud/stackit-sdk-go/services/iaasalpha v0.1.23-alpha/go.mod h1:v5VGvTxLcCdJJmblbhqYalt/MFHcElDfYoy15CMhaWs=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
@@ -287,8 +280,6 @@ github.com/xanzy/ssh-agent v0.3.3 h1:+/15pJfg/RsTxqYcX6fHqOXZwwMP+2VyYWJeWM2qQFM
github.com/xanzy/ssh-agent v0.3.3/go.mod h1:6dzNDKs0J9rVPHPhaGCukekBHKqfl+L3KghI1Bc68Uw=
github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f h1:J9EGpcZtP0E/raorCMxlFGSTBrsSlaDGf3jU/qvAE2c=
github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU=
-github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb h1:zGWFAtiMcyryUHoUjUJX0/lt1H2+i2Ka2n+D3DImSNo=
-github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU=
github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 h1:EzJWgHovont7NscjpAxXsDA8S8BMYve8Y5+7cuRE7R0=
github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ=
github.com/xeipuuv/gojsonschema v1.2.0 h1:LhYJRs+L4fBtjZUfuSZIKGeVu0QRy8e5Xi7D17UxZ74=
@@ -415,6 +406,7 @@ gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys=
+gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ=
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw=
gopkg.in/warnings.v0 v0.1.2 h1:wFXVbFY8DY5/xOe1ECiWdKCzZlxgshcYVNkBHstARME=
gopkg.in/warnings.v0 v0.1.2/go.mod h1:jksf8JmL6Qr/oQM2OXTHunEvvTAsrWBLb6OOjuVWRNI=
@@ -422,6 +414,7 @@ gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
+gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
gopkg.in/yaml.v3 v3.0.0-20191026110619-0b21df46bc1d/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
diff --git a/service_specs/postgres-flex/generator_settings.yml b/service_specs/postgres-flex/generator_settings.yml
new file mode 100644
index 00000000..8e8af766
--- /dev/null
+++ b/service_specs/postgres-flex/generator_settings.yml
@@ -0,0 +1,3 @@
+versions:
+ - name: alpha
+ path: v3alpha1
diff --git a/service_specs/sqlserverflex/generator_settings.yml b/service_specs/sqlserverflex/generator_settings.yml
new file mode 100644
index 00000000..1f92f640
--- /dev/null
+++ b/service_specs/sqlserverflex/generator_settings.yml
@@ -0,0 +1,5 @@
+versions:
+ - name: alpha
+ path: v3alpha1
+ - name: beta
+ path: v3beta1
diff --git a/stackit/internal/services/postgresflexalpha/database/datasource.go b/stackit/internal/services/postgresflexalpha/database/datasource.go
index ead08493..3421bf89 100644
--- a/stackit/internal/services/postgresflexalpha/database/datasource.go
+++ b/stackit/internal/services/postgresflexalpha/database/datasource.go
@@ -10,14 +10,13 @@ import (
"github.com/hashicorp/terraform-plugin-framework/diag"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-log/tflog"
+ "github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
postgresflexalpha2 "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/database/datasources_gen"
postgresflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/utils"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
-
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
)
// Ensure the implementation satisfies the expected interfaces.
@@ -38,7 +37,7 @@ type dataSourceModel struct {
// databaseDataSource is the data source implementation.
type databaseDataSource struct {
- client *postgresflexalpha.APIClient
+ client *v3alpha1api.APIClient
providerData core.ProviderData
}
@@ -144,7 +143,7 @@ func (r *databaseDataSource) getDatabaseByNameOrID(
model *dataSourceModel,
projectId, region, instanceId string,
diags *diag.Diagnostics,
-) (*postgresflexalpha.ListDatabase, error) {
+) (*v3alpha1api.ListDatabase, error) {
isIdSet := !model.DatabaseId.IsNull() && !model.DatabaseId.IsUnknown()
isNameSet := !model.Name.IsNull() && !model.Name.IsUnknown()
@@ -157,14 +156,14 @@ func (r *databaseDataSource) getDatabaseByNameOrID(
}
if isIdSet {
- databaseId := model.DatabaseId.ValueInt64()
+ databaseId := model.DatabaseId.ValueInt32()
ctx = tflog.SetField(ctx, "database_id", databaseId)
- return getDatabaseById(ctx, r.client, projectId, region, instanceId, databaseId)
+ return getDatabaseById(ctx, r.client.DefaultAPI, projectId, region, instanceId, databaseId)
}
databaseName := model.Name.ValueString()
ctx = tflog.SetField(ctx, "name", databaseName)
- return getDatabaseByName(ctx, r.client, projectId, region, instanceId, databaseName)
+ return getDatabaseByName(ctx, r.client.DefaultAPI, projectId, region, instanceId, databaseName)
}
// handleReadError centralizes API error handling for the Read operation.
diff --git a/stackit/internal/services/postgresflexalpha/database/datasources_gen/database_data_source_gen.go b/stackit/internal/services/postgresflexalpha/database/datasources_gen/database_data_source_gen.go
index d5683a6c..b15168b8 100644
--- a/stackit/internal/services/postgresflexalpha/database/datasources_gen/database_data_source_gen.go
+++ b/stackit/internal/services/postgresflexalpha/database/datasources_gen/database_data_source_gen.go
@@ -14,12 +14,12 @@ import (
func DatabaseDataSourceSchema(ctx context.Context) schema.Schema {
return schema.Schema{
Attributes: map[string]schema.Attribute{
- "database_id": schema.Int64Attribute{
+ "database_id": schema.Int32Attribute{
Required: true,
Description: "The ID of the database.",
MarkdownDescription: "The ID of the database.",
},
- "tf_original_api_id": schema.Int64Attribute{
+ "tf_original_api_id": schema.Int32Attribute{
Computed: true,
Description: "The id of the database.",
MarkdownDescription: "The id of the database.",
@@ -59,8 +59,8 @@ func DatabaseDataSourceSchema(ctx context.Context) schema.Schema {
}
type DatabaseModel struct {
- DatabaseId types.Int64 `tfsdk:"database_id"`
- Id types.Int64 `tfsdk:"tf_original_api_id"`
+ DatabaseId types.Int32 `tfsdk:"database_id"`
+ Id types.Int32 `tfsdk:"tf_original_api_id"`
InstanceId types.String `tfsdk:"instance_id"`
Name types.String `tfsdk:"name"`
Owner types.String `tfsdk:"owner"`
diff --git a/stackit/internal/services/postgresflexalpha/database/datasources_gen/databases_data_source_gen.go b/stackit/internal/services/postgresflexalpha/database/datasources_gen/databases_data_source_gen.go
index b8bc6010..3adf4b28 100644
--- a/stackit/internal/services/postgresflexalpha/database/datasources_gen/databases_data_source_gen.go
+++ b/stackit/internal/services/postgresflexalpha/database/datasources_gen/databases_data_source_gen.go
@@ -23,7 +23,7 @@ func DatabasesDataSourceSchema(ctx context.Context) schema.Schema {
"databases": schema.ListNestedAttribute{
NestedObject: schema.NestedAttributeObject{
Attributes: map[string]schema.Attribute{
- "id": schema.Int64Attribute{
+ "id": schema.Int32Attribute{
Computed: true,
Description: "The id of the database.",
MarkdownDescription: "The id of the database.",
@@ -54,7 +54,7 @@ func DatabasesDataSourceSchema(ctx context.Context) schema.Schema {
Description: "The ID of the instance.",
MarkdownDescription: "The ID of the instance.",
},
- "page": schema.Int64Attribute{
+ "page": schema.Int32Attribute{
Optional: true,
Computed: true,
Description: "Number of the page of items list to be returned.",
@@ -62,19 +62,19 @@ func DatabasesDataSourceSchema(ctx context.Context) schema.Schema {
},
"pagination": schema.SingleNestedAttribute{
Attributes: map[string]schema.Attribute{
- "page": schema.Int64Attribute{
+ "page": schema.Int32Attribute{
Computed: true,
},
- "size": schema.Int64Attribute{
+ "size": schema.Int32Attribute{
Computed: true,
},
"sort": schema.StringAttribute{
Computed: true,
},
- "total_pages": schema.Int64Attribute{
+ "total_pages": schema.Int32Attribute{
Computed: true,
},
- "total_rows": schema.Int64Attribute{
+ "total_rows": schema.Int32Attribute{
Computed: true,
},
},
@@ -100,7 +100,7 @@ func DatabasesDataSourceSchema(ctx context.Context) schema.Schema {
),
},
},
- "size": schema.Int64Attribute{
+ "size": schema.Int32Attribute{
Optional: true,
Computed: true,
Description: "Number of items to be returned on each page.",
@@ -131,11 +131,11 @@ func DatabasesDataSourceSchema(ctx context.Context) schema.Schema {
type DatabasesModel struct {
Databases types.List `tfsdk:"databases"`
InstanceId types.String `tfsdk:"instance_id"`
- Page types.Int64 `tfsdk:"page"`
+ Page types.Int32 `tfsdk:"page"`
Pagination PaginationValue `tfsdk:"pagination"`
ProjectId types.String `tfsdk:"project_id"`
Region types.String `tfsdk:"region"`
- Size types.Int64 `tfsdk:"size"`
+ Size types.Int32 `tfsdk:"size"`
Sort types.String `tfsdk:"sort"`
}
@@ -174,12 +174,12 @@ func (t DatabasesType) ValueFromObject(ctx context.Context, in basetypes.ObjectV
return nil, diags
}
- idVal, ok := idAttribute.(basetypes.Int64Value)
+ idVal, ok := idAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`id expected to be basetypes.Int64Value, was: %T`, idAttribute))
+ fmt.Sprintf(`id expected to be basetypes.Int32Value, was: %T`, idAttribute))
}
nameAttribute, ok := attributes["name"]
@@ -303,12 +303,12 @@ func NewDatabasesValue(attributeTypes map[string]attr.Type, attributes map[strin
return NewDatabasesValueUnknown(), diags
}
- idVal, ok := idAttribute.(basetypes.Int64Value)
+ idVal, ok := idAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`id expected to be basetypes.Int64Value, was: %T`, idAttribute))
+ fmt.Sprintf(`id expected to be basetypes.Int32Value, was: %T`, idAttribute))
}
nameAttribute, ok := attributes["name"]
@@ -427,7 +427,7 @@ func (t DatabasesType) ValueType(ctx context.Context) attr.Value {
var _ basetypes.ObjectValuable = DatabasesValue{}
type DatabasesValue struct {
- Id basetypes.Int64Value `tfsdk:"id"`
+ Id basetypes.Int32Value `tfsdk:"id"`
Name basetypes.StringValue `tfsdk:"name"`
Owner basetypes.StringValue `tfsdk:"owner"`
state attr.ValueState
@@ -439,7 +439,7 @@ func (v DatabasesValue) ToTerraformValue(ctx context.Context) (tftypes.Value, er
var val tftypes.Value
var err error
- attrTypes["id"] = basetypes.Int64Type{}.TerraformType(ctx)
+ attrTypes["id"] = basetypes.Int32Type{}.TerraformType(ctx)
attrTypes["name"] = basetypes.StringType{}.TerraformType(ctx)
attrTypes["owner"] = basetypes.StringType{}.TerraformType(ctx)
@@ -503,7 +503,7 @@ func (v DatabasesValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValu
var diags diag.Diagnostics
attributeTypes := map[string]attr.Type{
- "id": basetypes.Int64Type{},
+ "id": basetypes.Int32Type{},
"name": basetypes.StringType{},
"owner": basetypes.StringType{},
}
@@ -567,7 +567,7 @@ func (v DatabasesValue) Type(ctx context.Context) attr.Type {
func (v DatabasesValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
return map[string]attr.Type{
- "id": basetypes.Int64Type{},
+ "id": basetypes.Int32Type{},
"name": basetypes.StringType{},
"owner": basetypes.StringType{},
}
@@ -608,12 +608,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
return nil, diags
}
- pageVal, ok := pageAttribute.(basetypes.Int64Value)
+ pageVal, ok := pageAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute))
+ fmt.Sprintf(`page expected to be basetypes.Int32Value, was: %T`, pageAttribute))
}
sizeAttribute, ok := attributes["size"]
@@ -626,12 +626,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
return nil, diags
}
- sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
+ sizeVal, ok := sizeAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
+ fmt.Sprintf(`size expected to be basetypes.Int32Value, was: %T`, sizeAttribute))
}
sortAttribute, ok := attributes["sort"]
@@ -662,12 +662,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
return nil, diags
}
- totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value)
+ totalPagesVal, ok := totalPagesAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute))
+ fmt.Sprintf(`total_pages expected to be basetypes.Int32Value, was: %T`, totalPagesAttribute))
}
totalRowsAttribute, ok := attributes["total_rows"]
@@ -680,12 +680,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
return nil, diags
}
- totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value)
+ totalRowsVal, ok := totalRowsAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute))
+ fmt.Sprintf(`total_rows expected to be basetypes.Int32Value, was: %T`, totalRowsAttribute))
}
if diags.HasError() {
@@ -775,12 +775,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
return NewPaginationValueUnknown(), diags
}
- pageVal, ok := pageAttribute.(basetypes.Int64Value)
+ pageVal, ok := pageAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute))
+ fmt.Sprintf(`page expected to be basetypes.Int32Value, was: %T`, pageAttribute))
}
sizeAttribute, ok := attributes["size"]
@@ -793,12 +793,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
return NewPaginationValueUnknown(), diags
}
- sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
+ sizeVal, ok := sizeAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
+ fmt.Sprintf(`size expected to be basetypes.Int32Value, was: %T`, sizeAttribute))
}
sortAttribute, ok := attributes["sort"]
@@ -829,12 +829,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
return NewPaginationValueUnknown(), diags
}
- totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value)
+ totalPagesVal, ok := totalPagesAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute))
+ fmt.Sprintf(`total_pages expected to be basetypes.Int32Value, was: %T`, totalPagesAttribute))
}
totalRowsAttribute, ok := attributes["total_rows"]
@@ -847,12 +847,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
return NewPaginationValueUnknown(), diags
}
- totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value)
+ totalRowsVal, ok := totalRowsAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute))
+ fmt.Sprintf(`total_rows expected to be basetypes.Int32Value, was: %T`, totalRowsAttribute))
}
if diags.HasError() {
@@ -937,11 +937,11 @@ func (t PaginationType) ValueType(ctx context.Context) attr.Value {
var _ basetypes.ObjectValuable = PaginationValue{}
type PaginationValue struct {
- Page basetypes.Int64Value `tfsdk:"page"`
- Size basetypes.Int64Value `tfsdk:"size"`
+ Page basetypes.Int32Value `tfsdk:"page"`
+ Size basetypes.Int32Value `tfsdk:"size"`
Sort basetypes.StringValue `tfsdk:"sort"`
- TotalPages basetypes.Int64Value `tfsdk:"total_pages"`
- TotalRows basetypes.Int64Value `tfsdk:"total_rows"`
+ TotalPages basetypes.Int32Value `tfsdk:"total_pages"`
+ TotalRows basetypes.Int32Value `tfsdk:"total_rows"`
state attr.ValueState
}
@@ -951,11 +951,11 @@ func (v PaginationValue) ToTerraformValue(ctx context.Context) (tftypes.Value, e
var val tftypes.Value
var err error
- attrTypes["page"] = basetypes.Int64Type{}.TerraformType(ctx)
- attrTypes["size"] = basetypes.Int64Type{}.TerraformType(ctx)
+ attrTypes["page"] = basetypes.Int32Type{}.TerraformType(ctx)
+ attrTypes["size"] = basetypes.Int32Type{}.TerraformType(ctx)
attrTypes["sort"] = basetypes.StringType{}.TerraformType(ctx)
- attrTypes["total_pages"] = basetypes.Int64Type{}.TerraformType(ctx)
- attrTypes["total_rows"] = basetypes.Int64Type{}.TerraformType(ctx)
+ attrTypes["total_pages"] = basetypes.Int32Type{}.TerraformType(ctx)
+ attrTypes["total_rows"] = basetypes.Int32Type{}.TerraformType(ctx)
objectType := tftypes.Object{AttributeTypes: attrTypes}
@@ -1033,11 +1033,11 @@ func (v PaginationValue) ToObjectValue(ctx context.Context) (basetypes.ObjectVal
var diags diag.Diagnostics
attributeTypes := map[string]attr.Type{
- "page": basetypes.Int64Type{},
- "size": basetypes.Int64Type{},
+ "page": basetypes.Int32Type{},
+ "size": basetypes.Int32Type{},
"sort": basetypes.StringType{},
- "total_pages": basetypes.Int64Type{},
- "total_rows": basetypes.Int64Type{},
+ "total_pages": basetypes.Int32Type{},
+ "total_rows": basetypes.Int32Type{},
}
if v.IsNull() {
@@ -1109,10 +1109,10 @@ func (v PaginationValue) Type(ctx context.Context) attr.Type {
func (v PaginationValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
return map[string]attr.Type{
- "page": basetypes.Int64Type{},
- "size": basetypes.Int64Type{},
+ "page": basetypes.Int32Type{},
+ "size": basetypes.Int32Type{},
"sort": basetypes.StringType{},
- "total_pages": basetypes.Int64Type{},
- "total_rows": basetypes.Int64Type{},
+ "total_pages": basetypes.Int32Type{},
+ "total_rows": basetypes.Int32Type{},
}
}
diff --git a/stackit/internal/services/postgresflexalpha/database/functions.go b/stackit/internal/services/postgresflexalpha/database/functions.go
index e67f4926..57973d69 100644
--- a/stackit/internal/services/postgresflexalpha/database/functions.go
+++ b/stackit/internal/services/postgresflexalpha/database/functions.go
@@ -5,7 +5,7 @@ import (
"fmt"
"strings"
- postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
+ "github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
)
// databaseClientReader represents the contract to listing databases from postgresflex.APIClient.
@@ -15,7 +15,7 @@ type databaseClientReader interface {
projectId string,
region string,
instanceId string,
- ) postgresflex.ApiListDatabasesRequestRequest
+ ) v3alpha1api.ApiListDatabasesRequestRequest
}
// getDatabaseById gets a database by its ID.
@@ -23,10 +23,10 @@ func getDatabaseById(
ctx context.Context,
client databaseClientReader,
projectId, region, instanceId string,
- databaseId int64,
-) (*postgresflex.ListDatabase, error) {
- filter := func(db postgresflex.ListDatabase) bool {
- return db.Id != nil && *db.Id == databaseId
+ databaseId int32,
+) (*v3alpha1api.ListDatabase, error) {
+ filter := func(db v3alpha1api.ListDatabase) bool {
+ return db.Id == databaseId
}
return getDatabase(ctx, client, projectId, region, instanceId, filter)
}
@@ -36,9 +36,9 @@ func getDatabaseByName(
ctx context.Context,
client databaseClientReader,
projectId, region, instanceId, databaseName string,
-) (*postgresflex.ListDatabase, error) {
- filter := func(db postgresflex.ListDatabase) bool {
- return db.Name != nil && *db.Name == databaseName
+) (*v3alpha1api.ListDatabase, error) {
+ filter := func(db v3alpha1api.ListDatabase) bool {
+ return db.Name == databaseName
}
return getDatabase(ctx, client, projectId, region, instanceId, filter)
}
@@ -49,8 +49,8 @@ func getDatabase(
ctx context.Context,
client databaseClientReader,
projectId, region, instanceId string,
- filter func(db postgresflex.ListDatabase) bool,
-) (*postgresflex.ListDatabase, error) {
+ filter func(db v3alpha1api.ListDatabase) bool,
+) (*v3alpha1api.ListDatabase, error) {
if projectId == "" || region == "" || instanceId == "" {
return nil, fmt.Errorf("all parameters (project, region, instance) are required")
}
@@ -59,18 +59,18 @@ func getDatabase(
for page := int32(1); ; page++ {
res, err := client.ListDatabasesRequest(ctx, projectId, region, instanceId).
- Page(page).Size(pageSize).Sort(postgresflex.DATABASESORT_DATABASE_ID_ASC).Execute()
+ Page(page).Size(pageSize).Sort(v3alpha1api.DATABASESORT_DATABASE_ID_ASC).Execute()
if err != nil {
return nil, fmt.Errorf("requesting database list (page %d): %w", page, err)
}
// If the API returns no databases, we have reached the end of the list.
- if res.Databases == nil || len(*res.Databases) == 0 {
+ if res.Databases == nil || len(res.Databases) == 0 {
break
}
// Iterate over databases to find a match
- for _, db := range *res.Databases {
+ for _, db := range res.Databases {
if filter(db) {
foundDb := db
return &foundDb, nil
@@ -82,10 +82,6 @@ func getDatabase(
}
// cleanString removes leading and trailing quotes which are sometimes returned by the API.
-func cleanString(s *string) *string {
- if s == nil {
- return nil
- }
- res := strings.Trim(*s, "\"")
- return &res
+func cleanString(s string) string {
+ return strings.Trim(s, "\"")
}
diff --git a/stackit/internal/services/postgresflexalpha/database/functions_test.go b/stackit/internal/services/postgresflexalpha/database/functions_test.go
index 4c921b14..cfa14aaa 100644
--- a/stackit/internal/services/postgresflexalpha/database/functions_test.go
+++ b/stackit/internal/services/postgresflexalpha/database/functions_test.go
@@ -7,7 +7,7 @@ import (
"github.com/google/go-cmp/cmp"
"github.com/stackitcloud/stackit-sdk-go/core/utils"
- postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
+ "github.com/stackitcloud/stackit-sdk-go/services/postgresflex"
)
type mockRequest struct {
@@ -37,28 +37,28 @@ func (m *mockDBClient) ListDatabasesRequest(
}
func TestGetDatabase(t *testing.T) {
- mockResp := func(page int64) (*postgresflex.ListDatabasesResponse, error) {
+ mockResp := func(page int32) (*postgresflex.ListDatabasesResponse, error) {
if page == 1 {
return &postgresflex.ListDatabasesResponse{
Databases: &[]postgresflex.ListDatabase{
- {Id: utils.Ptr(int64(1)), Name: utils.Ptr("first")},
- {Id: utils.Ptr(int64(2)), Name: utils.Ptr("second")},
+ {Id: utils.Ptr(int32(1)), Name: utils.Ptr("first")},
+ {Id: utils.Ptr(int32(2)), Name: utils.Ptr("second")},
},
Pagination: &postgresflex.Pagination{
- Page: utils.Ptr(int64(1)),
- TotalPages: utils.Ptr(int64(2)),
- Size: utils.Ptr(int64(3)),
+ Page: utils.Ptr(int32(1)),
+ TotalPages: utils.Ptr(int32(2)),
+ Size: utils.Ptr(int32(3)),
},
}, nil
}
if page == 2 {
return &postgresflex.ListDatabasesResponse{
- Databases: &[]postgresflex.ListDatabase{{Id: utils.Ptr(int64(3)), Name: utils.Ptr("three")}},
+ Databases: &[]postgresflex.ListDatabase{{Id: utils.Ptr(int32(3)), Name: utils.Ptr("three")}},
Pagination: &postgresflex.Pagination{
- Page: utils.Ptr(int64(2)),
- TotalPages: utils.Ptr(int64(2)),
- Size: utils.Ptr(int64(3)),
+ Page: utils.Ptr(int32(2)),
+ TotalPages: utils.Ptr(int32(2)),
+ Size: utils.Ptr(int32(3)),
},
}, nil
}
@@ -66,9 +66,9 @@ func TestGetDatabase(t *testing.T) {
return &postgresflex.ListDatabasesResponse{
Databases: &[]postgresflex.ListDatabase{},
Pagination: &postgresflex.Pagination{
- Page: utils.Ptr(int64(3)),
- TotalPages: utils.Ptr(int64(2)),
- Size: utils.Ptr(int64(3)),
+ Page: utils.Ptr(int32(3)),
+ TotalPages: utils.Ptr(int32(2)),
+ Size: utils.Ptr(int32(3)),
},
}, nil
}
@@ -80,7 +80,7 @@ func TestGetDatabase(t *testing.T) {
instanceId string
wantErr bool
wantDbName string
- wantDbId int64
+ wantDbId int32
}{
{
description: "Success - Found by name on first page",
@@ -133,7 +133,7 @@ func TestGetDatabase(t *testing.T) {
for _, tt := range tests {
t.Run(
tt.description, func(t *testing.T) {
- var currentPage int64
+ var currentPage int32
client := &mockDBClient{
executeRequest: func() postgresflex.ApiListDatabasesRequestRequest {
return &mockRequest{
diff --git a/stackit/internal/services/postgresflexalpha/database/mapper.go b/stackit/internal/services/postgresflexalpha/database/mapper.go
index 89140267..91085d61 100644
--- a/stackit/internal/services/postgresflexalpha/database/mapper.go
+++ b/stackit/internal/services/postgresflexalpha/database/mapper.go
@@ -2,43 +2,42 @@ package postgresflexalpha
import (
"fmt"
- "strconv"
"github.com/hashicorp/terraform-plugin-framework/types"
+ "github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
)
// mapFields maps fields from a ListDatabase API response to a resourceModel for the data source.
func mapFields(
- source *postgresflexalpha.ListDatabase,
+ source *v3alpha1api.ListDatabase,
model *dataSourceModel,
region string,
) error {
if source == nil {
return fmt.Errorf("response is nil")
}
- if source.Id == nil || *source.Id == 0 {
+ if source.Id == 0 {
return fmt.Errorf("id not present")
}
if model == nil {
return fmt.Errorf("model given is nil")
}
- var databaseId int64
- if model.DatabaseId.ValueInt64() != 0 {
- databaseId = model.DatabaseId.ValueInt64()
- } else if source.Id != nil {
- databaseId = *source.Id
+ var databaseId int32
+ if model.DatabaseId.ValueInt32() != 0 {
+ databaseId = model.DatabaseId.ValueInt32()
+ } else if source.Id != 0 {
+ databaseId = source.Id
} else {
return fmt.Errorf("database id not present")
}
- model.Id = types.Int64Value(databaseId)
- model.DatabaseId = types.Int64Value(databaseId)
+ model.Id = types.Int32Value(databaseId)
+ model.DatabaseId = types.Int32Value(databaseId)
model.Name = types.StringValue(source.GetName())
- model.Owner = types.StringPointerValue(cleanString(source.Owner))
+ model.Owner = types.StringValue(cleanString(source.Owner))
model.Region = types.StringValue(region)
model.ProjectId = types.StringValue(model.ProjectId.ValueString())
model.InstanceId = types.StringValue(model.InstanceId.ValueString())
@@ -46,48 +45,48 @@ func mapFields(
model.ProjectId.ValueString(),
region,
model.InstanceId.ValueString(),
- strconv.FormatInt(databaseId, 10),
+ fmt.Sprintf("%d", databaseId),
)
return nil
}
// mapResourceFields maps fields from a GetDatabase API response to a resourceModel for the resource.
-func mapResourceFields(source *postgresflexalpha.GetDatabaseResponse, model *resourceModel) error {
+func mapResourceFields(source *v3alpha1api.GetDatabaseResponse, model *resourceModel) error {
if source == nil {
return fmt.Errorf("response is nil")
}
- if source.Id == nil || *source.Id == 0 {
+ if source.Id == 0 {
return fmt.Errorf("id not present")
}
if model == nil {
return fmt.Errorf("model input is nil")
}
- var databaseId int64
- if model.Id.ValueInt64() != 0 {
- databaseId = model.Id.ValueInt64()
- } else if source.Id != nil {
- databaseId = *source.Id
+ var databaseId int32
+ if model.Id.ValueInt32() != 0 {
+ databaseId = model.Id.ValueInt32()
+ } else if source.Id != 0 {
+ databaseId = source.Id
} else {
return fmt.Errorf("database id not present")
}
- model.Id = types.Int64Value(databaseId)
- model.DatabaseId = types.Int64Value(databaseId)
+ model.Id = types.Int32Value(databaseId)
+ model.DatabaseId = types.Int32Value(databaseId)
model.Name = types.StringValue(source.GetName())
- model.Owner = types.StringPointerValue(cleanString(source.Owner))
+ model.Owner = types.StringValue(cleanString(source.Owner))
return nil
}
// toCreatePayload converts the resource model to an API create payload.
-func toCreatePayload(model *resourceModel) (*postgresflexalpha.CreateDatabaseRequestPayload, error) {
+func toCreatePayload(model *resourceModel) (*v3alpha1api.CreateDatabaseRequestPayload, error) {
if model == nil {
return nil, fmt.Errorf("nil model")
}
- return &postgresflexalpha.CreateDatabaseRequestPayload{
- Name: model.Name.ValueStringPointer(),
+ return &v3alpha1api.CreateDatabaseRequestPayload{
+ Name: model.Name.ValueString(),
Owner: model.Owner.ValueStringPointer(),
}, nil
}
diff --git a/stackit/internal/services/postgresflexalpha/database/mapper_test.go b/stackit/internal/services/postgresflexalpha/database/mapper_test.go
index 16fd0ce6..6ad21a08 100644
--- a/stackit/internal/services/postgresflexalpha/database/mapper_test.go
+++ b/stackit/internal/services/postgresflexalpha/database/mapper_test.go
@@ -7,7 +7,7 @@ import (
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/stackitcloud/stackit-sdk-go/core/utils"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
+ postgresflexalpha "github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
datasource "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/database/datasources_gen"
)
@@ -31,7 +31,7 @@ func TestMapFields(t *testing.T) {
name: "should map fields correctly",
given: given{
source: &postgresflexalpha.ListDatabase{
- Id: utils.Ptr(int64(1)),
+ Id: utils.Ptr(int32(1)),
Name: utils.Ptr("my-db"),
Owner: utils.Ptr("\"my-owner\""),
},
@@ -46,11 +46,11 @@ func TestMapFields(t *testing.T) {
expected: expected{
model: &dataSourceModel{
DatabaseModel: datasource.DatabaseModel{
- Id: types.Int64Value(1),
+ Id: types.Int32Value(1),
Name: types.StringValue("my-db"),
Owner: types.StringValue("my-owner"),
Region: types.StringValue("eu01"),
- DatabaseId: types.Int64Value(1),
+ DatabaseId: types.Int32Value(1),
InstanceId: types.StringValue("my-instance"),
ProjectId: types.StringValue("my-project"),
},
@@ -62,12 +62,12 @@ func TestMapFields(t *testing.T) {
name: "should preserve existing model ID",
given: given{
source: &postgresflexalpha.ListDatabase{
- Id: utils.Ptr(int64(1)),
+ Id: utils.Ptr(int32(1)),
Name: utils.Ptr("my-db"),
},
model: &dataSourceModel{
DatabaseModel: datasource.DatabaseModel{
- Id: types.Int64Value(1),
+ Id: types.Int32Value(1),
ProjectId: types.StringValue("my-project"),
InstanceId: types.StringValue("my-instance"),
},
@@ -77,9 +77,9 @@ func TestMapFields(t *testing.T) {
expected: expected{
model: &dataSourceModel{
DatabaseModel: datasource.DatabaseModel{
- Id: types.Int64Value(1),
+ Id: types.Int32Value(1),
Name: types.StringValue("my-db"),
- Owner: types.StringNull(), DatabaseId: types.Int64Value(1),
+ Owner: types.StringNull(), DatabaseId: types.Int32Value(1),
Region: types.StringValue("eu01"),
InstanceId: types.StringValue("my-instance"),
ProjectId: types.StringValue("my-project"),
@@ -107,7 +107,7 @@ func TestMapFields(t *testing.T) {
{
name: "should fail on nil model",
given: given{
- source: &postgresflexalpha.ListDatabase{Id: utils.Ptr(int64(1))},
+ source: &postgresflexalpha.ListDatabase{Id: utils.Ptr(int32(1))},
model: nil,
},
expected: expected{err: true},
@@ -150,7 +150,7 @@ func TestMapResourceFields(t *testing.T) {
name: "should map fields correctly",
given: given{
source: &postgresflexalpha.GetDatabaseResponse{
- Id: utils.Ptr(int64(1)),
+ Id: utils.Ptr(int32(1)),
Name: utils.Ptr("my-db"),
Owner: utils.Ptr("my-owner"),
},
@@ -158,10 +158,10 @@ func TestMapResourceFields(t *testing.T) {
},
expected: expected{
model: &resourceModel{
- Id: types.Int64Value(1),
+ Id: types.Int32Value(1),
Name: types.StringValue("my-db"),
Owner: types.StringValue("my-owner"),
- DatabaseId: types.Int64Value(1),
+ DatabaseId: types.Int32Value(1),
},
},
},
diff --git a/stackit/internal/services/postgresflexalpha/database/resource.go b/stackit/internal/services/postgresflexalpha/database/resource.go
index 2b12c281..d0e92221 100644
--- a/stackit/internal/services/postgresflexalpha/database/resource.go
+++ b/stackit/internal/services/postgresflexalpha/database/resource.go
@@ -14,14 +14,14 @@ import (
"github.com/hashicorp/terraform-plugin-framework/resource/identityschema"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-log/tflog"
+ "github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
- postgresflexalpha2 "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/database/resources_gen"
+ postgresflexalphaResGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/database/resources_gen"
postgresflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/utils"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
- postgresflexalpha3 "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/wait/postgresflexalpha"
+ postgresflexalphaWait "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/wait/postgresflexalpha"
)
var (
@@ -43,19 +43,19 @@ func NewDatabaseResource() resource.Resource {
}
// resourceModel describes the resource data model.
-type resourceModel = postgresflexalpha2.DatabaseModel
+type resourceModel = postgresflexalphaResGen.DatabaseModel
// DatabaseResourceIdentityModel describes the resource's identity attributes.
type DatabaseResourceIdentityModel struct {
ProjectID types.String `tfsdk:"project_id"`
Region types.String `tfsdk:"region"`
InstanceID types.String `tfsdk:"instance_id"`
- DatabaseID types.Int64 `tfsdk:"database_id"`
+ DatabaseID types.Int32 `tfsdk:"database_id"`
}
// databaseResource is the resource implementation.
type databaseResource struct {
- client *postgresflexalpha.APIClient
+ client *v3alpha1api.APIClient
providerData core.ProviderData
}
@@ -122,7 +122,7 @@ var modifiersFileByte []byte
// Schema defines the schema for the resource.
func (r *databaseResource) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) {
- s := postgresflexalpha2.DatabaseResourceSchema(ctx)
+ s := postgresflexalphaResGen.DatabaseResourceSchema(ctx)
fields, err := utils.ReadModifiersConfig(modifiersFileByte)
if err != nil {
@@ -155,7 +155,7 @@ func (r *databaseResource) IdentitySchema(
"instance_id": identityschema.StringAttribute{
RequiredForImport: true,
},
- "database_id": identityschema.Int64Attribute{
+ "database_id": identityschema.Int32Attribute{
RequiredForImport: true,
},
},
@@ -198,7 +198,7 @@ func (r *databaseResource) Create(
return
}
// Create new database
- databaseResp, err := r.client.CreateDatabaseRequest(
+ databaseResp, err := r.client.DefaultAPI.CreateDatabaseRequest(
ctx,
projectId,
region,
@@ -209,16 +209,17 @@ func (r *databaseResource) Create(
return
}
- if databaseResp == nil || databaseResp.Id == nil {
+ dbID, ok := databaseResp.GetIdOk()
+ if !ok {
core.LogAndAddError(
ctx,
&resp.Diagnostics,
funcErrorSummary,
- "API didn't return database Id. A database might have been created",
+ "API didn't return database Id. A database might still have been created",
)
return
}
- databaseId := *databaseResp.Id
+ databaseId := *dbID
ctx = tflog.SetField(ctx, "database_id", databaseId)
ctx = core.LogResponse(ctx)
@@ -227,14 +228,14 @@ func (r *databaseResource) Create(
ProjectID: types.StringValue(projectId),
Region: types.StringValue(region),
InstanceID: types.StringValue(instanceId),
- DatabaseID: types.Int64Value(databaseId),
+ DatabaseID: types.Int32Value(int32(databaseId)),
}
resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
if resp.Diagnostics.HasError() {
return
}
- database, err := postgresflexalpha3.GetDatabaseByIdWaitHandler(ctx, r.client, projectId, instanceId, region, databaseId).
+ database, err := postgresflexalphaWait.GetDatabaseByIdWaitHandler(ctx, r.client.DefaultAPI, projectId, instanceId, region, databaseId).
SetTimeout(15 * time.Minute).
SetSleepBeforeWait(15 * time.Second).
WaitWithContext(ctx)
@@ -286,14 +287,14 @@ func (r *databaseResource) Read(
projectId := model.ProjectId.ValueString()
instanceId := model.InstanceId.ValueString()
region := model.Region.ValueString()
- databaseId := model.DatabaseId.ValueInt64()
+ databaseId := model.DatabaseId.ValueInt32()
ctx = tflog.SetField(ctx, "project_id", projectId)
ctx = tflog.SetField(ctx, "instance_id", instanceId)
ctx = tflog.SetField(ctx, "region", region)
ctx = tflog.SetField(ctx, "database_id", databaseId)
- databaseResp, err := postgresflexalpha3.GetDatabaseByIdWaitHandler(ctx, r.client, projectId, instanceId, region, databaseId).
+ databaseResp, err := postgresflexalphaWait.GetDatabaseByIdWaitHandler(ctx, r.client.DefaultAPI, projectId, instanceId, region, databaseId).
SetTimeout(15 * time.Minute).
SetSleepBeforeWait(15 * time.Second).
WaitWithContext(ctx)
@@ -327,7 +328,7 @@ func (r *databaseResource) Read(
ProjectID: types.StringValue(projectId),
Region: types.StringValue(region),
InstanceID: types.StringValue(instanceId),
- DatabaseID: types.Int64Value(databaseId),
+ DatabaseID: types.Int32Value(databaseId),
}
resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
if resp.Diagnostics.HasError() {
@@ -361,13 +362,7 @@ func (r *databaseResource) Update(
projectId := model.ProjectId.ValueString()
instanceId := model.InstanceId.ValueString()
region := model.Region.ValueString()
- databaseId64 := model.DatabaseId.ValueInt64()
-
- if databaseId64 > math.MaxInt32 {
- core.LogAndAddError(ctx, &resp.Diagnostics, "Error in type conversion", "int value too large (databaseId)")
- return
- }
- databaseId := int32(databaseId64) // nolint:gosec // check is performed above
+ databaseId := model.DatabaseId.ValueInt32()
ctx = tflog.SetField(ctx, "project_id", projectId)
ctx = tflog.SetField(ctx, "instance_id", instanceId)
@@ -383,7 +378,7 @@ func (r *databaseResource) Update(
}
modified := false
- var payload postgresflexalpha.UpdateDatabasePartiallyRequestPayload
+ var payload v3alpha1api.UpdateDatabasePartiallyRequestPayload
if stateModel.Name != model.Name {
payload.Name = model.Name.ValueStringPointer()
modified = true
@@ -400,7 +395,7 @@ func (r *databaseResource) Update(
}
// Update existing database
- err := r.client.UpdateDatabasePartiallyRequest(
+ err := r.client.DefaultAPI.UpdateDatabasePartiallyRequest(
ctx,
projectId,
region,
@@ -414,7 +409,7 @@ func (r *databaseResource) Update(
ctx = core.LogResponse(ctx)
- databaseResp, err := postgresflexalpha3.GetDatabaseByIdWaitHandler(ctx, r.client, projectId, instanceId, region, databaseId64).
+ databaseResp, err := postgresflexalphaWait.GetDatabaseByIdWaitHandler(ctx, r.client.DefaultAPI, projectId, instanceId, region, databaseId).
SetTimeout(15 * time.Minute).
SetSleepBeforeWait(15 * time.Second).
WaitWithContext(ctx)
@@ -442,7 +437,7 @@ func (r *databaseResource) Update(
ProjectID: types.StringValue(projectId),
Region: types.StringValue(region),
InstanceID: types.StringValue(instanceId),
- DatabaseID: types.Int64Value(databaseId64),
+ DatabaseID: types.Int32Value(databaseId),
}
resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
if resp.Diagnostics.HasError() {
@@ -500,7 +495,7 @@ func (r *databaseResource) Delete(
ctx = tflog.SetField(ctx, "database_id", databaseId)
// Delete existing record set
- err := r.client.DeleteDatabaseRequestExecute(ctx, projectId, region, instanceId, databaseId)
+ err := r.client.DefaultAPI.DeleteDatabaseRequest(ctx, projectId, region, instanceId, databaseId).Execute()
if err != nil {
core.LogAndAddError(ctx, &resp.Diagnostics, "Error deleting database", fmt.Sprintf("Calling API: %v", err))
}
@@ -572,7 +567,7 @@ func (r *databaseResource) ImportState(
projectId := identityData.ProjectID.ValueString()
region := identityData.Region.ValueString()
instanceId := identityData.InstanceID.ValueString()
- databaseId := identityData.DatabaseID.ValueInt64()
+ databaseId := identityData.DatabaseID.ValueInt32()
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), projectId)...)
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), region)...)
@@ -586,14 +581,14 @@ func (r *databaseResource) ImportState(
func (r *databaseResource) extractIdentityData(
model resourceModel,
identity DatabaseResourceIdentityModel,
-) (projectId, region, instanceId string, databaseId int64, err error) {
+) (projectId, region, instanceId string, databaseId int32, err error) {
if !model.DatabaseId.IsNull() && !model.DatabaseId.IsUnknown() {
- databaseId = model.DatabaseId.ValueInt64()
+ databaseId = model.DatabaseId.ValueInt32()
} else {
if identity.DatabaseID.IsNull() || identity.DatabaseID.IsUnknown() {
return "", "", "", 0, fmt.Errorf("database_id not found in config")
}
- databaseId = identity.DatabaseID.ValueInt64()
+ databaseId = identity.DatabaseID.ValueInt32()
}
if !model.ProjectId.IsNull() && !model.ProjectId.IsUnknown() {
diff --git a/stackit/internal/services/postgresflexalpha/database/resources_gen/database_resource_gen.go b/stackit/internal/services/postgresflexalpha/database/resources_gen/database_resource_gen.go
index 6affc956..54d8b62d 100644
--- a/stackit/internal/services/postgresflexalpha/database/resources_gen/database_resource_gen.go
+++ b/stackit/internal/services/postgresflexalpha/database/resources_gen/database_resource_gen.go
@@ -14,13 +14,13 @@ import (
func DatabaseResourceSchema(ctx context.Context) schema.Schema {
return schema.Schema{
Attributes: map[string]schema.Attribute{
- "database_id": schema.Int64Attribute{
+ "database_id": schema.Int32Attribute{
Optional: true,
Computed: true,
Description: "The ID of the database.",
MarkdownDescription: "The ID of the database.",
},
- "id": schema.Int64Attribute{
+ "id": schema.Int32Attribute{
Computed: true,
Description: "The id of the database.",
MarkdownDescription: "The id of the database.",
@@ -64,8 +64,8 @@ func DatabaseResourceSchema(ctx context.Context) schema.Schema {
}
type DatabaseModel struct {
- DatabaseId types.Int64 `tfsdk:"database_id"`
- Id types.Int64 `tfsdk:"id"`
+ DatabaseId types.Int32 `tfsdk:"database_id"`
+ Id types.Int32 `tfsdk:"id"`
InstanceId types.String `tfsdk:"instance_id"`
Name types.String `tfsdk:"name"`
Owner types.String `tfsdk:"owner"`
diff --git a/stackit/internal/services/postgresflexalpha/flavor/datasource.go b/stackit/internal/services/postgresflexalpha/flavor/datasource.go
index 52c6b779..69c2a0b8 100644
--- a/stackit/internal/services/postgresflexalpha/flavor/datasource.go
+++ b/stackit/internal/services/postgresflexalpha/flavor/datasource.go
@@ -8,8 +8,8 @@ import (
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
+ "github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
postgresflexalphaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/flavors/datasources_gen"
postgresflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/utils"
@@ -30,13 +30,13 @@ type FlavorModel struct {
ProjectId types.String `tfsdk:"project_id"`
Region types.String `tfsdk:"region"`
StorageClass types.String `tfsdk:"storage_class"`
- Cpu types.Int64 `tfsdk:"cpu"`
+ Cpu types.Int32 `tfsdk:"cpu"`
Description types.String `tfsdk:"description"`
Id types.String `tfsdk:"id"`
FlavorId types.String `tfsdk:"flavor_id"`
- MaxGb types.Int64 `tfsdk:"max_gb"`
- Memory types.Int64 `tfsdk:"ram"`
- MinGb types.Int64 `tfsdk:"min_gb"`
+ MaxGb types.Int32 `tfsdk:"max_gb"`
+ Memory types.Int32 `tfsdk:"ram"`
+ MinGb types.Int32 `tfsdk:"min_gb"`
NodeType types.String `tfsdk:"node_type"`
StorageClasses types.List `tfsdk:"storage_classes"`
}
@@ -48,7 +48,7 @@ func NewFlavorDataSource() datasource.DataSource {
// flavorDataSource is the data source implementation.
type flavorDataSource struct {
- client *postgresflexalpha.APIClient
+ client *v3alpha1api.APIClient
providerData core.ProviderData
}
@@ -86,12 +86,12 @@ func (r *flavorDataSource) Schema(ctx context.Context, _ datasource.SchemaReques
Description: "The flavor description.",
MarkdownDescription: "The flavor description.",
},
- "cpu": schema.Int64Attribute{
+ "cpu": schema.Int32Attribute{
Required: true,
Description: "The cpu count of the instance.",
MarkdownDescription: "The cpu count of the instance.",
},
- "ram": schema.Int64Attribute{
+ "ram": schema.Int32Attribute{
Required: true,
Description: "The memory of the instance in Gibibyte.",
MarkdownDescription: "The memory of the instance in Gibibyte.",
@@ -116,12 +116,12 @@ func (r *flavorDataSource) Schema(ctx context.Context, _ datasource.SchemaReques
Description: "The flavor id of the instance flavor.",
MarkdownDescription: "The flavor id of the instance flavor.",
},
- "max_gb": schema.Int64Attribute{
+ "max_gb": schema.Int32Attribute{
Computed: true,
Description: "maximum storage which can be ordered for the flavor in Gigabyte.",
MarkdownDescription: "maximum storage which can be ordered for the flavor in Gigabyte.",
},
- "min_gb": schema.Int64Attribute{
+ "min_gb": schema.Int32Attribute{
Computed: true,
Description: "minimum storage which is required to order in Gigabyte.",
MarkdownDescription: "minimum storage which is required to order in Gigabyte.",
@@ -138,10 +138,10 @@ func (r *flavorDataSource) Schema(ctx context.Context, _ datasource.SchemaReques
"class": schema.StringAttribute{
Computed: true,
},
- "max_io_per_sec": schema.Int64Attribute{
+ "max_io_per_sec": schema.Int32Attribute{
Computed: true,
},
- "max_through_in_mb": schema.Int64Attribute{
+ "max_through_in_mb": schema.Int32Attribute{
Computed: true,
},
},
@@ -171,25 +171,25 @@ func (r *flavorDataSource) Read(ctx context.Context, req datasource.ReadRequest,
ctx = tflog.SetField(ctx, "project_id", projectId)
ctx = tflog.SetField(ctx, "region", region)
- flavors, err := getAllFlavors(ctx, r.client, projectId, region)
+ flavors, err := getAllFlavors(ctx, r.client.DefaultAPI, projectId, region)
if err != nil {
core.LogAndAddError(ctx, &resp.Diagnostics, "Error reading flavors", fmt.Sprintf("getAllFlavors: %v", err))
return
}
- var foundFlavors []postgresflexalpha.ListFlavors
+ var foundFlavors []v3alpha1api.ListFlavors
for _, flavor := range flavors {
- if model.Cpu.ValueInt64() != *flavor.Cpu {
+ if model.Cpu.ValueInt32() != flavor.Cpu {
continue
}
- if model.Memory.ValueInt64() != *flavor.Memory {
+ if model.Memory.ValueInt32() != flavor.Memory {
continue
}
- if model.NodeType.ValueString() != *flavor.NodeType {
+ if model.NodeType.ValueString() != flavor.NodeType {
continue
}
- for _, sc := range *flavor.StorageClasses {
- if model.StorageClass.ValueString() != *sc.Class {
+ for _, sc := range flavor.StorageClasses {
+ if model.StorageClass.ValueString() != sc.Class {
continue
}
foundFlavors = append(foundFlavors, flavor)
@@ -205,11 +205,11 @@ func (r *flavorDataSource) Read(ctx context.Context, req datasource.ReadRequest,
}
f := foundFlavors[0]
- model.Description = types.StringValue(*f.Description)
- model.Id = utils.BuildInternalTerraformId(model.ProjectId.ValueString(), region, *f.Id)
- model.FlavorId = types.StringValue(*f.Id)
- model.MaxGb = types.Int64Value(*f.MaxGB)
- model.MinGb = types.Int64Value(*f.MinGB)
+ model.Description = types.StringValue(f.Description)
+ model.Id = utils.BuildInternalTerraformId(model.ProjectId.ValueString(), region, f.Id)
+ model.FlavorId = types.StringValue(f.Id)
+ model.MaxGb = types.Int32Value(f.MaxGB)
+ model.MinGb = types.Int32Value(f.MinGB)
if f.StorageClasses == nil {
model.StorageClasses = types.ListNull(postgresflexalphaGen.StorageClassesType{
@@ -219,15 +219,15 @@ func (r *flavorDataSource) Read(ctx context.Context, req datasource.ReadRequest,
})
} else {
var scList []attr.Value
- for _, sc := range *f.StorageClasses {
+ for _, sc := range f.StorageClasses {
scList = append(
scList,
postgresflexalphaGen.NewStorageClassesValueMust(
postgresflexalphaGen.StorageClassesValue{}.AttributeTypes(ctx),
map[string]attr.Value{
- "class": types.StringValue(*sc.Class),
- "max_io_per_sec": types.Int64Value(*sc.MaxIoPerSec),
- "max_through_in_mb": types.Int64Value(*sc.MaxThroughInMb),
+ "class": types.StringValue(sc.Class),
+ "max_io_per_sec": types.Int32Value(sc.MaxIoPerSec),
+ "max_through_in_mb": types.Int32Value(sc.MaxThroughInMb),
},
),
)
diff --git a/stackit/internal/services/postgresflexalpha/flavor/datasources_gen/flavors_data_source_gen.go b/stackit/internal/services/postgresflexalpha/flavor/datasources_gen/flavors_data_source_gen.go
index 924d1375..19be2c9e 100644
--- a/stackit/internal/services/postgresflexalpha/flavor/datasources_gen/flavors_data_source_gen.go
+++ b/stackit/internal/services/postgresflexalpha/flavor/datasources_gen/flavors_data_source_gen.go
@@ -23,7 +23,7 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
"flavors": schema.ListNestedAttribute{
NestedObject: schema.NestedAttributeObject{
Attributes: map[string]schema.Attribute{
- "cpu": schema.Int64Attribute{
+ "cpu": schema.Int32Attribute{
Computed: true,
Description: "The cpu count of the instance.",
MarkdownDescription: "The cpu count of the instance.",
@@ -38,17 +38,17 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
Description: "The id of the instance flavor.",
MarkdownDescription: "The id of the instance flavor.",
},
- "max_gb": schema.Int64Attribute{
+ "max_gb": schema.Int32Attribute{
Computed: true,
Description: "maximum storage which can be ordered for the flavor in Gigabyte.",
MarkdownDescription: "maximum storage which can be ordered for the flavor in Gigabyte.",
},
- "memory": schema.Int64Attribute{
+ "memory": schema.Int32Attribute{
Computed: true,
Description: "The memory of the instance in Gibibyte.",
MarkdownDescription: "The memory of the instance in Gibibyte.",
},
- "min_gb": schema.Int64Attribute{
+ "min_gb": schema.Int32Attribute{
Computed: true,
Description: "minimum storage which is required to order in Gigabyte.",
MarkdownDescription: "minimum storage which is required to order in Gigabyte.",
@@ -64,10 +64,10 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
"class": schema.StringAttribute{
Computed: true,
},
- "max_io_per_sec": schema.Int64Attribute{
+ "max_io_per_sec": schema.Int32Attribute{
Computed: true,
},
- "max_through_in_mb": schema.Int64Attribute{
+ "max_through_in_mb": schema.Int32Attribute{
Computed: true,
},
},
@@ -92,7 +92,7 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
Description: "List of flavors available for the project.",
MarkdownDescription: "List of flavors available for the project.",
},
- "page": schema.Int64Attribute{
+ "page": schema.Int32Attribute{
Optional: true,
Computed: true,
Description: "Number of the page of items list to be returned.",
@@ -100,19 +100,19 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
},
"pagination": schema.SingleNestedAttribute{
Attributes: map[string]schema.Attribute{
- "page": schema.Int64Attribute{
+ "page": schema.Int32Attribute{
Computed: true,
},
- "size": schema.Int64Attribute{
+ "size": schema.Int32Attribute{
Computed: true,
},
"sort": schema.StringAttribute{
Computed: true,
},
- "total_pages": schema.Int64Attribute{
+ "total_pages": schema.Int32Attribute{
Computed: true,
},
- "total_rows": schema.Int64Attribute{
+ "total_rows": schema.Int32Attribute{
Computed: true,
},
},
@@ -138,7 +138,7 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
),
},
},
- "size": schema.Int64Attribute{
+ "size": schema.Int32Attribute{
Optional: true,
Computed: true,
Description: "Number of items to be returned on each page.",
@@ -178,11 +178,11 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
type FlavorsModel struct {
Flavors types.List `tfsdk:"flavors"`
- Page types.Int64 `tfsdk:"page"`
+ Page types.Int32 `tfsdk:"page"`
Pagination PaginationValue `tfsdk:"pagination"`
ProjectId types.String `tfsdk:"project_id"`
Region types.String `tfsdk:"region"`
- Size types.Int64 `tfsdk:"size"`
+ Size types.Int32 `tfsdk:"size"`
Sort types.String `tfsdk:"sort"`
}
@@ -221,12 +221,12 @@ func (t FlavorsType) ValueFromObject(ctx context.Context, in basetypes.ObjectVal
return nil, diags
}
- cpuVal, ok := cpuAttribute.(basetypes.Int64Value)
+ cpuVal, ok := cpuAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`cpu expected to be basetypes.Int64Value, was: %T`, cpuAttribute))
+ fmt.Sprintf(`cpu expected to be basetypes.Int32Value, was: %T`, cpuAttribute))
}
descriptionAttribute, ok := attributes["description"]
@@ -275,12 +275,12 @@ func (t FlavorsType) ValueFromObject(ctx context.Context, in basetypes.ObjectVal
return nil, diags
}
- maxGbVal, ok := maxGbAttribute.(basetypes.Int64Value)
+ maxGbVal, ok := maxGbAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`max_gb expected to be basetypes.Int64Value, was: %T`, maxGbAttribute))
+ fmt.Sprintf(`max_gb expected to be basetypes.Int32Value, was: %T`, maxGbAttribute))
}
memoryAttribute, ok := attributes["memory"]
@@ -293,12 +293,12 @@ func (t FlavorsType) ValueFromObject(ctx context.Context, in basetypes.ObjectVal
return nil, diags
}
- memoryVal, ok := memoryAttribute.(basetypes.Int64Value)
+ memoryVal, ok := memoryAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`memory expected to be basetypes.Int64Value, was: %T`, memoryAttribute))
+ fmt.Sprintf(`memory expected to be basetypes.Int32Value, was: %T`, memoryAttribute))
}
minGbAttribute, ok := attributes["min_gb"]
@@ -311,12 +311,12 @@ func (t FlavorsType) ValueFromObject(ctx context.Context, in basetypes.ObjectVal
return nil, diags
}
- minGbVal, ok := minGbAttribute.(basetypes.Int64Value)
+ minGbVal, ok := minGbAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`min_gb expected to be basetypes.Int64Value, was: %T`, minGbAttribute))
+ fmt.Sprintf(`min_gb expected to be basetypes.Int32Value, was: %T`, minGbAttribute))
}
nodeTypeAttribute, ok := attributes["node_type"]
@@ -445,12 +445,12 @@ func NewFlavorsValue(attributeTypes map[string]attr.Type, attributes map[string]
return NewFlavorsValueUnknown(), diags
}
- cpuVal, ok := cpuAttribute.(basetypes.Int64Value)
+ cpuVal, ok := cpuAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`cpu expected to be basetypes.Int64Value, was: %T`, cpuAttribute))
+ fmt.Sprintf(`cpu expected to be basetypes.Int32Value, was: %T`, cpuAttribute))
}
descriptionAttribute, ok := attributes["description"]
@@ -499,12 +499,12 @@ func NewFlavorsValue(attributeTypes map[string]attr.Type, attributes map[string]
return NewFlavorsValueUnknown(), diags
}
- maxGbVal, ok := maxGbAttribute.(basetypes.Int64Value)
+ maxGbVal, ok := maxGbAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`max_gb expected to be basetypes.Int64Value, was: %T`, maxGbAttribute))
+ fmt.Sprintf(`max_gb expected to be basetypes.Int32Value, was: %T`, maxGbAttribute))
}
memoryAttribute, ok := attributes["memory"]
@@ -517,12 +517,12 @@ func NewFlavorsValue(attributeTypes map[string]attr.Type, attributes map[string]
return NewFlavorsValueUnknown(), diags
}
- memoryVal, ok := memoryAttribute.(basetypes.Int64Value)
+ memoryVal, ok := memoryAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`memory expected to be basetypes.Int64Value, was: %T`, memoryAttribute))
+ fmt.Sprintf(`memory expected to be basetypes.Int32Value, was: %T`, memoryAttribute))
}
minGbAttribute, ok := attributes["min_gb"]
@@ -535,12 +535,12 @@ func NewFlavorsValue(attributeTypes map[string]attr.Type, attributes map[string]
return NewFlavorsValueUnknown(), diags
}
- minGbVal, ok := minGbAttribute.(basetypes.Int64Value)
+ minGbVal, ok := minGbAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`min_gb expected to be basetypes.Int64Value, was: %T`, minGbAttribute))
+ fmt.Sprintf(`min_gb expected to be basetypes.Int32Value, was: %T`, minGbAttribute))
}
nodeTypeAttribute, ok := attributes["node_type"]
@@ -664,12 +664,12 @@ func (t FlavorsType) ValueType(ctx context.Context) attr.Value {
var _ basetypes.ObjectValuable = FlavorsValue{}
type FlavorsValue struct {
- Cpu basetypes.Int64Value `tfsdk:"cpu"`
+ Cpu basetypes.Int32Value `tfsdk:"cpu"`
Description basetypes.StringValue `tfsdk:"description"`
Id basetypes.StringValue `tfsdk:"id"`
- MaxGb basetypes.Int64Value `tfsdk:"max_gb"`
- Memory basetypes.Int64Value `tfsdk:"memory"`
- MinGb basetypes.Int64Value `tfsdk:"min_gb"`
+ MaxGb basetypes.Int32Value `tfsdk:"max_gb"`
+ Memory basetypes.Int32Value `tfsdk:"memory"`
+ MinGb basetypes.Int32Value `tfsdk:"min_gb"`
NodeType basetypes.StringValue `tfsdk:"node_type"`
StorageClasses basetypes.ListValue `tfsdk:"storage_classes"`
state attr.ValueState
@@ -681,12 +681,12 @@ func (v FlavorsValue) ToTerraformValue(ctx context.Context) (tftypes.Value, erro
var val tftypes.Value
var err error
- attrTypes["cpu"] = basetypes.Int64Type{}.TerraformType(ctx)
+ attrTypes["cpu"] = basetypes.Int32Type{}.TerraformType(ctx)
attrTypes["description"] = basetypes.StringType{}.TerraformType(ctx)
attrTypes["id"] = basetypes.StringType{}.TerraformType(ctx)
- attrTypes["max_gb"] = basetypes.Int64Type{}.TerraformType(ctx)
- attrTypes["memory"] = basetypes.Int64Type{}.TerraformType(ctx)
- attrTypes["min_gb"] = basetypes.Int64Type{}.TerraformType(ctx)
+ attrTypes["max_gb"] = basetypes.Int32Type{}.TerraformType(ctx)
+ attrTypes["memory"] = basetypes.Int32Type{}.TerraformType(ctx)
+ attrTypes["min_gb"] = basetypes.Int32Type{}.TerraformType(ctx)
attrTypes["node_type"] = basetypes.StringType{}.TerraformType(ctx)
attrTypes["storage_classes"] = basetypes.ListType{
ElemType: StorageClassesValue{}.Type(ctx),
@@ -821,12 +821,12 @@ func (v FlavorsValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue,
}
attributeTypes := map[string]attr.Type{
- "cpu": basetypes.Int64Type{},
+ "cpu": basetypes.Int32Type{},
"description": basetypes.StringType{},
"id": basetypes.StringType{},
- "max_gb": basetypes.Int64Type{},
- "memory": basetypes.Int64Type{},
- "min_gb": basetypes.Int64Type{},
+ "max_gb": basetypes.Int32Type{},
+ "memory": basetypes.Int32Type{},
+ "min_gb": basetypes.Int32Type{},
"node_type": basetypes.StringType{},
"storage_classes": basetypes.ListType{
ElemType: StorageClassesValue{}.Type(ctx),
@@ -917,12 +917,12 @@ func (v FlavorsValue) Type(ctx context.Context) attr.Type {
func (v FlavorsValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
return map[string]attr.Type{
- "cpu": basetypes.Int64Type{},
+ "cpu": basetypes.Int32Type{},
"description": basetypes.StringType{},
"id": basetypes.StringType{},
- "max_gb": basetypes.Int64Type{},
- "memory": basetypes.Int64Type{},
- "min_gb": basetypes.Int64Type{},
+ "max_gb": basetypes.Int32Type{},
+ "memory": basetypes.Int32Type{},
+ "min_gb": basetypes.Int32Type{},
"node_type": basetypes.StringType{},
"storage_classes": basetypes.ListType{
ElemType: StorageClassesValue{}.Type(ctx),
@@ -983,12 +983,12 @@ func (t StorageClassesType) ValueFromObject(ctx context.Context, in basetypes.Ob
return nil, diags
}
- maxIoPerSecVal, ok := maxIoPerSecAttribute.(basetypes.Int64Value)
+ maxIoPerSecVal, ok := maxIoPerSecAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`max_io_per_sec expected to be basetypes.Int64Value, was: %T`, maxIoPerSecAttribute))
+ fmt.Sprintf(`max_io_per_sec expected to be basetypes.Int32Value, was: %T`, maxIoPerSecAttribute))
}
maxThroughInMbAttribute, ok := attributes["max_through_in_mb"]
@@ -1001,12 +1001,12 @@ func (t StorageClassesType) ValueFromObject(ctx context.Context, in basetypes.Ob
return nil, diags
}
- maxThroughInMbVal, ok := maxThroughInMbAttribute.(basetypes.Int64Value)
+ maxThroughInMbVal, ok := maxThroughInMbAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`max_through_in_mb expected to be basetypes.Int64Value, was: %T`, maxThroughInMbAttribute))
+ fmt.Sprintf(`max_through_in_mb expected to be basetypes.Int32Value, was: %T`, maxThroughInMbAttribute))
}
if diags.HasError() {
@@ -1112,12 +1112,12 @@ func NewStorageClassesValue(attributeTypes map[string]attr.Type, attributes map[
return NewStorageClassesValueUnknown(), diags
}
- maxIoPerSecVal, ok := maxIoPerSecAttribute.(basetypes.Int64Value)
+ maxIoPerSecVal, ok := maxIoPerSecAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`max_io_per_sec expected to be basetypes.Int64Value, was: %T`, maxIoPerSecAttribute))
+ fmt.Sprintf(`max_io_per_sec expected to be basetypes.Int32Value, was: %T`, maxIoPerSecAttribute))
}
maxThroughInMbAttribute, ok := attributes["max_through_in_mb"]
@@ -1130,12 +1130,12 @@ func NewStorageClassesValue(attributeTypes map[string]attr.Type, attributes map[
return NewStorageClassesValueUnknown(), diags
}
- maxThroughInMbVal, ok := maxThroughInMbAttribute.(basetypes.Int64Value)
+ maxThroughInMbVal, ok := maxThroughInMbAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`max_through_in_mb expected to be basetypes.Int64Value, was: %T`, maxThroughInMbAttribute))
+ fmt.Sprintf(`max_through_in_mb expected to be basetypes.Int32Value, was: %T`, maxThroughInMbAttribute))
}
if diags.HasError() {
@@ -1219,8 +1219,8 @@ var _ basetypes.ObjectValuable = StorageClassesValue{}
type StorageClassesValue struct {
Class basetypes.StringValue `tfsdk:"class"`
- MaxIoPerSec basetypes.Int64Value `tfsdk:"max_io_per_sec"`
- MaxThroughInMb basetypes.Int64Value `tfsdk:"max_through_in_mb"`
+ MaxIoPerSec basetypes.Int32Value `tfsdk:"max_io_per_sec"`
+ MaxThroughInMb basetypes.Int32Value `tfsdk:"max_through_in_mb"`
state attr.ValueState
}
@@ -1231,8 +1231,8 @@ func (v StorageClassesValue) ToTerraformValue(ctx context.Context) (tftypes.Valu
var err error
attrTypes["class"] = basetypes.StringType{}.TerraformType(ctx)
- attrTypes["max_io_per_sec"] = basetypes.Int64Type{}.TerraformType(ctx)
- attrTypes["max_through_in_mb"] = basetypes.Int64Type{}.TerraformType(ctx)
+ attrTypes["max_io_per_sec"] = basetypes.Int32Type{}.TerraformType(ctx)
+ attrTypes["max_through_in_mb"] = basetypes.Int32Type{}.TerraformType(ctx)
objectType := tftypes.Object{AttributeTypes: attrTypes}
@@ -1295,8 +1295,8 @@ func (v StorageClassesValue) ToObjectValue(ctx context.Context) (basetypes.Objec
attributeTypes := map[string]attr.Type{
"class": basetypes.StringType{},
- "max_io_per_sec": basetypes.Int64Type{},
- "max_through_in_mb": basetypes.Int64Type{},
+ "max_io_per_sec": basetypes.Int32Type{},
+ "max_through_in_mb": basetypes.Int32Type{},
}
if v.IsNull() {
@@ -1359,8 +1359,8 @@ func (v StorageClassesValue) Type(ctx context.Context) attr.Type {
func (v StorageClassesValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
return map[string]attr.Type{
"class": basetypes.StringType{},
- "max_io_per_sec": basetypes.Int64Type{},
- "max_through_in_mb": basetypes.Int64Type{},
+ "max_io_per_sec": basetypes.Int32Type{},
+ "max_through_in_mb": basetypes.Int32Type{},
}
}
@@ -1399,12 +1399,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
return nil, diags
}
- pageVal, ok := pageAttribute.(basetypes.Int64Value)
+ pageVal, ok := pageAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute))
+ fmt.Sprintf(`page expected to be basetypes.Int32Value, was: %T`, pageAttribute))
}
sizeAttribute, ok := attributes["size"]
@@ -1417,12 +1417,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
return nil, diags
}
- sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
+ sizeVal, ok := sizeAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
+ fmt.Sprintf(`size expected to be basetypes.Int32Value, was: %T`, sizeAttribute))
}
sortAttribute, ok := attributes["sort"]
@@ -1453,12 +1453,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
return nil, diags
}
- totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value)
+ totalPagesVal, ok := totalPagesAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute))
+ fmt.Sprintf(`total_pages expected to be basetypes.Int32Value, was: %T`, totalPagesAttribute))
}
totalRowsAttribute, ok := attributes["total_rows"]
@@ -1471,12 +1471,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
return nil, diags
}
- totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value)
+ totalRowsVal, ok := totalRowsAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute))
+ fmt.Sprintf(`total_rows expected to be basetypes.Int32Value, was: %T`, totalRowsAttribute))
}
if diags.HasError() {
@@ -1566,12 +1566,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
return NewPaginationValueUnknown(), diags
}
- pageVal, ok := pageAttribute.(basetypes.Int64Value)
+ pageVal, ok := pageAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute))
+ fmt.Sprintf(`page expected to be basetypes.Int32Value, was: %T`, pageAttribute))
}
sizeAttribute, ok := attributes["size"]
@@ -1584,12 +1584,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
return NewPaginationValueUnknown(), diags
}
- sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
+ sizeVal, ok := sizeAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
+ fmt.Sprintf(`size expected to be basetypes.Int32Value, was: %T`, sizeAttribute))
}
sortAttribute, ok := attributes["sort"]
@@ -1620,12 +1620,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
return NewPaginationValueUnknown(), diags
}
- totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value)
+ totalPagesVal, ok := totalPagesAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute))
+ fmt.Sprintf(`total_pages expected to be basetypes.Int32Value, was: %T`, totalPagesAttribute))
}
totalRowsAttribute, ok := attributes["total_rows"]
@@ -1638,12 +1638,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
return NewPaginationValueUnknown(), diags
}
- totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value)
+ totalRowsVal, ok := totalRowsAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute))
+ fmt.Sprintf(`total_rows expected to be basetypes.Int32Value, was: %T`, totalRowsAttribute))
}
if diags.HasError() {
@@ -1728,11 +1728,11 @@ func (t PaginationType) ValueType(ctx context.Context) attr.Value {
var _ basetypes.ObjectValuable = PaginationValue{}
type PaginationValue struct {
- Page basetypes.Int64Value `tfsdk:"page"`
- Size basetypes.Int64Value `tfsdk:"size"`
+ Page basetypes.Int32Value `tfsdk:"page"`
+ Size basetypes.Int32Value `tfsdk:"size"`
Sort basetypes.StringValue `tfsdk:"sort"`
- TotalPages basetypes.Int64Value `tfsdk:"total_pages"`
- TotalRows basetypes.Int64Value `tfsdk:"total_rows"`
+ TotalPages basetypes.Int32Value `tfsdk:"total_pages"`
+ TotalRows basetypes.Int32Value `tfsdk:"total_rows"`
state attr.ValueState
}
@@ -1742,11 +1742,11 @@ func (v PaginationValue) ToTerraformValue(ctx context.Context) (tftypes.Value, e
var val tftypes.Value
var err error
- attrTypes["page"] = basetypes.Int64Type{}.TerraformType(ctx)
- attrTypes["size"] = basetypes.Int64Type{}.TerraformType(ctx)
+ attrTypes["page"] = basetypes.Int32Type{}.TerraformType(ctx)
+ attrTypes["size"] = basetypes.Int32Type{}.TerraformType(ctx)
attrTypes["sort"] = basetypes.StringType{}.TerraformType(ctx)
- attrTypes["total_pages"] = basetypes.Int64Type{}.TerraformType(ctx)
- attrTypes["total_rows"] = basetypes.Int64Type{}.TerraformType(ctx)
+ attrTypes["total_pages"] = basetypes.Int32Type{}.TerraformType(ctx)
+ attrTypes["total_rows"] = basetypes.Int32Type{}.TerraformType(ctx)
objectType := tftypes.Object{AttributeTypes: attrTypes}
@@ -1824,11 +1824,11 @@ func (v PaginationValue) ToObjectValue(ctx context.Context) (basetypes.ObjectVal
var diags diag.Diagnostics
attributeTypes := map[string]attr.Type{
- "page": basetypes.Int64Type{},
- "size": basetypes.Int64Type{},
+ "page": basetypes.Int32Type{},
+ "size": basetypes.Int32Type{},
"sort": basetypes.StringType{},
- "total_pages": basetypes.Int64Type{},
- "total_rows": basetypes.Int64Type{},
+ "total_pages": basetypes.Int32Type{},
+ "total_rows": basetypes.Int32Type{},
}
if v.IsNull() {
@@ -1900,10 +1900,10 @@ func (v PaginationValue) Type(ctx context.Context) attr.Type {
func (v PaginationValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
return map[string]attr.Type{
- "page": basetypes.Int64Type{},
- "size": basetypes.Int64Type{},
+ "page": basetypes.Int32Type{},
+ "size": basetypes.Int32Type{},
"sort": basetypes.StringType{},
- "total_pages": basetypes.Int64Type{},
- "total_rows": basetypes.Int64Type{},
+ "total_pages": basetypes.Int32Type{},
+ "total_rows": basetypes.Int32Type{},
}
}
diff --git a/stackit/internal/services/postgresflexalpha/flavor/functions.go b/stackit/internal/services/postgresflexalpha/flavor/functions.go
index 67c7f9fa..948c51ad 100644
--- a/stackit/internal/services/postgresflexalpha/flavor/functions.go
+++ b/stackit/internal/services/postgresflexalpha/flavor/functions.go
@@ -4,21 +4,21 @@ import (
"context"
"fmt"
- postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
+ "github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
)
type flavorsClientReader interface {
GetFlavorsRequest(
ctx context.Context,
projectId, region string,
- ) postgresflex.ApiGetFlavorsRequestRequest
+ ) v3alpha1api.ApiGetFlavorsRequestRequest
}
func getAllFlavors(ctx context.Context, client flavorsClientReader, projectId, region string) (
- []postgresflex.ListFlavors,
+ []v3alpha1api.ListFlavors,
error,
) {
- getAllFilter := func(_ postgresflex.ListFlavors) bool { return true }
+ getAllFilter := func(_ v3alpha1api.ListFlavors) bool { return true }
flavorList, err := getFlavorsByFilter(ctx, client, projectId, region, getAllFilter)
if err != nil {
return nil, err
@@ -32,29 +32,29 @@ func getFlavorsByFilter(
ctx context.Context,
client flavorsClientReader,
projectId, region string,
- filter func(db postgresflex.ListFlavors) bool,
-) ([]postgresflex.ListFlavors, error) {
+ filter func(db v3alpha1api.ListFlavors) bool,
+) ([]v3alpha1api.ListFlavors, error) {
if projectId == "" || region == "" {
return nil, fmt.Errorf("listing postgresflex flavors: projectId and region are required")
}
const pageSize = 25
- var result = make([]postgresflex.ListFlavors, 0)
+ var result = make([]v3alpha1api.ListFlavors, 0)
for page := int32(1); ; page++ {
res, err := client.GetFlavorsRequest(ctx, projectId, region).
- Page(page).Size(pageSize).Sort(postgresflex.FLAVORSORT_ID_ASC).Execute()
+ Page(page).Size(pageSize).Sort(v3alpha1api.FLAVORSORT_ID_ASC).Execute()
if err != nil {
return nil, fmt.Errorf("requesting flavors list (page %d): %w", page, err)
}
// If the API returns no flavors, we have reached the end of the list.
- if res.Flavors == nil || len(*res.Flavors) == 0 {
+ if len(res.Flavors) == 0 {
break
}
- for _, flavor := range *res.Flavors {
+ for _, flavor := range res.Flavors {
if filter(flavor) {
result = append(result, flavor)
}
diff --git a/stackit/internal/services/postgresflexalpha/flavor/functions_test.go b/stackit/internal/services/postgresflexalpha/flavor/functions_test.go
index bb7180c1..6f9f6981 100644
--- a/stackit/internal/services/postgresflexalpha/flavor/functions_test.go
+++ b/stackit/internal/services/postgresflexalpha/flavor/functions_test.go
@@ -4,9 +4,7 @@ import (
"context"
"testing"
- "github.com/stackitcloud/stackit-sdk-go/core/utils"
-
- postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
+ postgresflex "github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
)
type mockRequest struct {
@@ -30,25 +28,25 @@ func (m *mockFlavorsClient) GetFlavorsRequest(_ context.Context, _, _ string) po
return m.executeRequest()
}
-var mockResp = func(page int64) (*postgresflex.GetFlavorsResponse, error) {
+var mockResp = func(page int32) (*postgresflex.GetFlavorsResponse, error) {
if page == 1 {
return &postgresflex.GetFlavorsResponse{
- Flavors: &[]postgresflex.ListFlavors{
- {Id: utils.Ptr("flavor-1"), Description: utils.Ptr("first")},
- {Id: utils.Ptr("flavor-2"), Description: utils.Ptr("second")},
+ Flavors: []postgresflex.ListFlavors{
+ {Id: "flavor-1", Description: "first"},
+ {Id: "flavor-2", Description: "second"},
},
}, nil
}
if page == 2 {
return &postgresflex.GetFlavorsResponse{
- Flavors: &[]postgresflex.ListFlavors{
- {Id: utils.Ptr("flavor-3"), Description: utils.Ptr("three")},
+ Flavors: []postgresflex.ListFlavors{
+ {Id: "flavor-3", Description: "three"},
},
}, nil
}
return &postgresflex.GetFlavorsResponse{
- Flavors: &[]postgresflex.ListFlavors{},
+ Flavors: []postgresflex.ListFlavors{},
}, nil
}
@@ -72,7 +70,7 @@ func TestGetFlavorsByFilter(t *testing.T) {
{
description: "Success - Filter flavors by description",
projectId: "pid", region: "reg",
- filter: func(f postgresflex.ListFlavors) bool { return *f.Description == "first" },
+ filter: func(f postgresflex.ListFlavors) bool { return f.Description == "first" },
wantCount: 1,
wantErr: false,
},
@@ -86,10 +84,10 @@ func TestGetFlavorsByFilter(t *testing.T) {
for _, tt := range tests {
t.Run(
tt.description, func(t *testing.T) {
- var currentPage int64
+ var currentPage int32
client := &mockFlavorsClient{
executeRequest: func() postgresflex.ApiGetFlavorsRequestRequest {
- return &mockRequest{
+ return mockRequest{
executeFunc: func() (*postgresflex.GetFlavorsResponse, error) {
currentPage++
return mockResp(currentPage)
@@ -113,10 +111,10 @@ func TestGetFlavorsByFilter(t *testing.T) {
}
func TestGetAllFlavors(t *testing.T) {
- var currentPage int64
+ var currentPage int32
client := &mockFlavorsClient{
executeRequest: func() postgresflex.ApiGetFlavorsRequestRequest {
- return &mockRequest{
+ return mockRequest{
executeFunc: func() (*postgresflex.GetFlavorsResponse, error) {
currentPage++
return mockResp(currentPage)
diff --git a/stackit/internal/services/postgresflexalpha/flavors/datasource.go b/stackit/internal/services/postgresflexalpha/flavors/datasource.go
index df8fddac..f5c99a82 100644
--- a/stackit/internal/services/postgresflexalpha/flavors/datasource.go
+++ b/stackit/internal/services/postgresflexalpha/flavors/datasource.go
@@ -5,8 +5,8 @@ import (
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-log/tflog"
+ "github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
postgresflexalphaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/flavors/datasources_gen"
@@ -26,7 +26,7 @@ func NewFlavorsDataSource() datasource.DataSource {
type dataSourceModel = postgresflexalphaGen.FlavorsModel
type flavorsDataSource struct {
- client *postgresflexalpha.APIClient
+ client *v3alpha1api.APIClient
providerData core.ProviderData
}
diff --git a/stackit/internal/services/postgresflexalpha/flavors/datasources_gen/flavors_data_source_gen.go b/stackit/internal/services/postgresflexalpha/flavors/datasources_gen/flavors_data_source_gen.go
index e0b76221..54aefa23 100644
--- a/stackit/internal/services/postgresflexalpha/flavors/datasources_gen/flavors_data_source_gen.go
+++ b/stackit/internal/services/postgresflexalpha/flavors/datasources_gen/flavors_data_source_gen.go
@@ -23,7 +23,7 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
"flavors": schema.ListNestedAttribute{
NestedObject: schema.NestedAttributeObject{
Attributes: map[string]schema.Attribute{
- "cpu": schema.Int64Attribute{
+ "cpu": schema.Int32Attribute{
Computed: true,
Description: "The cpu count of the instance.",
MarkdownDescription: "The cpu count of the instance.",
@@ -38,17 +38,17 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
Description: "The id of the instance flavor.",
MarkdownDescription: "The id of the instance flavor.",
},
- "max_gb": schema.Int64Attribute{
+ "max_gb": schema.Int32Attribute{
Computed: true,
Description: "maximum storage which can be ordered for the flavor in Gigabyte.",
MarkdownDescription: "maximum storage which can be ordered for the flavor in Gigabyte.",
},
- "memory": schema.Int64Attribute{
+ "memory": schema.Int32Attribute{
Computed: true,
Description: "The memory of the instance in Gibibyte.",
MarkdownDescription: "The memory of the instance in Gibibyte.",
},
- "min_gb": schema.Int64Attribute{
+ "min_gb": schema.Int32Attribute{
Computed: true,
Description: "minimum storage which is required to order in Gigabyte.",
MarkdownDescription: "minimum storage which is required to order in Gigabyte.",
@@ -64,10 +64,10 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
"class": schema.StringAttribute{
Computed: true,
},
- "max_io_per_sec": schema.Int64Attribute{
+ "max_io_per_sec": schema.Int32Attribute{
Computed: true,
},
- "max_through_in_mb": schema.Int64Attribute{
+ "max_through_in_mb": schema.Int32Attribute{
Computed: true,
},
},
@@ -92,7 +92,7 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
Description: "List of flavors available for the project.",
MarkdownDescription: "List of flavors available for the project.",
},
- "page": schema.Int64Attribute{
+ "page": schema.Int32Attribute{
Optional: true,
Computed: true,
Description: "Number of the page of items list to be returned.",
@@ -100,19 +100,19 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
},
"pagination": schema.SingleNestedAttribute{
Attributes: map[string]schema.Attribute{
- "page": schema.Int64Attribute{
+ "page": schema.Int32Attribute{
Computed: true,
},
- "size": schema.Int64Attribute{
+ "size": schema.Int32Attribute{
Computed: true,
},
"sort": schema.StringAttribute{
Computed: true,
},
- "total_pages": schema.Int64Attribute{
+ "total_pages": schema.Int32Attribute{
Computed: true,
},
- "total_rows": schema.Int64Attribute{
+ "total_rows": schema.Int32Attribute{
Computed: true,
},
},
@@ -138,7 +138,7 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
),
},
},
- "size": schema.Int64Attribute{
+ "size": schema.Int32Attribute{
Optional: true,
Computed: true,
Description: "Number of items to be returned on each page.",
@@ -176,11 +176,11 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
type FlavorsModel struct {
Flavors types.List `tfsdk:"flavors"`
- Page types.Int64 `tfsdk:"page"`
+ Page types.Int32 `tfsdk:"page"`
Pagination PaginationValue `tfsdk:"pagination"`
ProjectId types.String `tfsdk:"project_id"`
Region types.String `tfsdk:"region"`
- Size types.Int64 `tfsdk:"size"`
+ Size types.Int32 `tfsdk:"size"`
Sort types.String `tfsdk:"sort"`
}
@@ -219,12 +219,12 @@ func (t FlavorsType) ValueFromObject(ctx context.Context, in basetypes.ObjectVal
return nil, diags
}
- cpuVal, ok := cpuAttribute.(basetypes.Int64Value)
+ cpuVal, ok := cpuAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`cpu expected to be basetypes.Int64Value, was: %T`, cpuAttribute))
+ fmt.Sprintf(`cpu expected to be basetypes.Int32Value, was: %T`, cpuAttribute))
}
descriptionAttribute, ok := attributes["description"]
@@ -273,12 +273,12 @@ func (t FlavorsType) ValueFromObject(ctx context.Context, in basetypes.ObjectVal
return nil, diags
}
- maxGbVal, ok := maxGbAttribute.(basetypes.Int64Value)
+ maxGbVal, ok := maxGbAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`max_gb expected to be basetypes.Int64Value, was: %T`, maxGbAttribute))
+ fmt.Sprintf(`max_gb expected to be basetypes.Int32Value, was: %T`, maxGbAttribute))
}
memoryAttribute, ok := attributes["memory"]
@@ -291,12 +291,12 @@ func (t FlavorsType) ValueFromObject(ctx context.Context, in basetypes.ObjectVal
return nil, diags
}
- memoryVal, ok := memoryAttribute.(basetypes.Int64Value)
+ memoryVal, ok := memoryAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`memory expected to be basetypes.Int64Value, was: %T`, memoryAttribute))
+ fmt.Sprintf(`memory expected to be basetypes.Int32Value, was: %T`, memoryAttribute))
}
minGbAttribute, ok := attributes["min_gb"]
@@ -309,12 +309,12 @@ func (t FlavorsType) ValueFromObject(ctx context.Context, in basetypes.ObjectVal
return nil, diags
}
- minGbVal, ok := minGbAttribute.(basetypes.Int64Value)
+ minGbVal, ok := minGbAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`min_gb expected to be basetypes.Int64Value, was: %T`, minGbAttribute))
+ fmt.Sprintf(`min_gb expected to be basetypes.Int32Value, was: %T`, minGbAttribute))
}
nodeTypeAttribute, ok := attributes["node_type"]
@@ -443,12 +443,12 @@ func NewFlavorsValue(attributeTypes map[string]attr.Type, attributes map[string]
return NewFlavorsValueUnknown(), diags
}
- cpuVal, ok := cpuAttribute.(basetypes.Int64Value)
+ cpuVal, ok := cpuAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`cpu expected to be basetypes.Int64Value, was: %T`, cpuAttribute))
+ fmt.Sprintf(`cpu expected to be basetypes.Int32Value, was: %T`, cpuAttribute))
}
descriptionAttribute, ok := attributes["description"]
@@ -497,12 +497,12 @@ func NewFlavorsValue(attributeTypes map[string]attr.Type, attributes map[string]
return NewFlavorsValueUnknown(), diags
}
- maxGbVal, ok := maxGbAttribute.(basetypes.Int64Value)
+ maxGbVal, ok := maxGbAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`max_gb expected to be basetypes.Int64Value, was: %T`, maxGbAttribute))
+ fmt.Sprintf(`max_gb expected to be basetypes.Int32Value, was: %T`, maxGbAttribute))
}
memoryAttribute, ok := attributes["memory"]
@@ -515,12 +515,12 @@ func NewFlavorsValue(attributeTypes map[string]attr.Type, attributes map[string]
return NewFlavorsValueUnknown(), diags
}
- memoryVal, ok := memoryAttribute.(basetypes.Int64Value)
+ memoryVal, ok := memoryAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`memory expected to be basetypes.Int64Value, was: %T`, memoryAttribute))
+ fmt.Sprintf(`memory expected to be basetypes.Int32Value, was: %T`, memoryAttribute))
}
minGbAttribute, ok := attributes["min_gb"]
@@ -533,12 +533,12 @@ func NewFlavorsValue(attributeTypes map[string]attr.Type, attributes map[string]
return NewFlavorsValueUnknown(), diags
}
- minGbVal, ok := minGbAttribute.(basetypes.Int64Value)
+ minGbVal, ok := minGbAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`min_gb expected to be basetypes.Int64Value, was: %T`, minGbAttribute))
+ fmt.Sprintf(`min_gb expected to be basetypes.Int32Value, was: %T`, minGbAttribute))
}
nodeTypeAttribute, ok := attributes["node_type"]
@@ -662,12 +662,12 @@ func (t FlavorsType) ValueType(ctx context.Context) attr.Value {
var _ basetypes.ObjectValuable = FlavorsValue{}
type FlavorsValue struct {
- Cpu basetypes.Int64Value `tfsdk:"cpu"`
+ Cpu basetypes.Int32Value `tfsdk:"cpu"`
Description basetypes.StringValue `tfsdk:"description"`
Id basetypes.StringValue `tfsdk:"id"`
- MaxGb basetypes.Int64Value `tfsdk:"max_gb"`
- Memory basetypes.Int64Value `tfsdk:"memory"`
- MinGb basetypes.Int64Value `tfsdk:"min_gb"`
+ MaxGb basetypes.Int32Value `tfsdk:"max_gb"`
+ Memory basetypes.Int32Value `tfsdk:"memory"`
+ MinGb basetypes.Int32Value `tfsdk:"min_gb"`
NodeType basetypes.StringValue `tfsdk:"node_type"`
StorageClasses basetypes.ListValue `tfsdk:"storage_classes"`
state attr.ValueState
@@ -679,12 +679,12 @@ func (v FlavorsValue) ToTerraformValue(ctx context.Context) (tftypes.Value, erro
var val tftypes.Value
var err error
- attrTypes["cpu"] = basetypes.Int64Type{}.TerraformType(ctx)
+ attrTypes["cpu"] = basetypes.Int32Type{}.TerraformType(ctx)
attrTypes["description"] = basetypes.StringType{}.TerraformType(ctx)
attrTypes["id"] = basetypes.StringType{}.TerraformType(ctx)
- attrTypes["max_gb"] = basetypes.Int64Type{}.TerraformType(ctx)
- attrTypes["memory"] = basetypes.Int64Type{}.TerraformType(ctx)
- attrTypes["min_gb"] = basetypes.Int64Type{}.TerraformType(ctx)
+ attrTypes["max_gb"] = basetypes.Int32Type{}.TerraformType(ctx)
+ attrTypes["memory"] = basetypes.Int32Type{}.TerraformType(ctx)
+ attrTypes["min_gb"] = basetypes.Int32Type{}.TerraformType(ctx)
attrTypes["node_type"] = basetypes.StringType{}.TerraformType(ctx)
attrTypes["storage_classes"] = basetypes.ListType{
ElemType: StorageClassesValue{}.Type(ctx),
@@ -819,12 +819,12 @@ func (v FlavorsValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue,
}
attributeTypes := map[string]attr.Type{
- "cpu": basetypes.Int64Type{},
+ "cpu": basetypes.Int32Type{},
"description": basetypes.StringType{},
"id": basetypes.StringType{},
- "max_gb": basetypes.Int64Type{},
- "memory": basetypes.Int64Type{},
- "min_gb": basetypes.Int64Type{},
+ "max_gb": basetypes.Int32Type{},
+ "memory": basetypes.Int32Type{},
+ "min_gb": basetypes.Int32Type{},
"node_type": basetypes.StringType{},
"storage_classes": basetypes.ListType{
ElemType: StorageClassesValue{}.Type(ctx),
@@ -915,12 +915,12 @@ func (v FlavorsValue) Type(ctx context.Context) attr.Type {
func (v FlavorsValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
return map[string]attr.Type{
- "cpu": basetypes.Int64Type{},
+ "cpu": basetypes.Int32Type{},
"description": basetypes.StringType{},
"id": basetypes.StringType{},
- "max_gb": basetypes.Int64Type{},
- "memory": basetypes.Int64Type{},
- "min_gb": basetypes.Int64Type{},
+ "max_gb": basetypes.Int32Type{},
+ "memory": basetypes.Int32Type{},
+ "min_gb": basetypes.Int32Type{},
"node_type": basetypes.StringType{},
"storage_classes": basetypes.ListType{
ElemType: StorageClassesValue{}.Type(ctx),
@@ -981,12 +981,12 @@ func (t StorageClassesType) ValueFromObject(ctx context.Context, in basetypes.Ob
return nil, diags
}
- maxIoPerSecVal, ok := maxIoPerSecAttribute.(basetypes.Int64Value)
+ maxIoPerSecVal, ok := maxIoPerSecAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`max_io_per_sec expected to be basetypes.Int64Value, was: %T`, maxIoPerSecAttribute))
+ fmt.Sprintf(`max_io_per_sec expected to be basetypes.Int32Value, was: %T`, maxIoPerSecAttribute))
}
maxThroughInMbAttribute, ok := attributes["max_through_in_mb"]
@@ -999,12 +999,12 @@ func (t StorageClassesType) ValueFromObject(ctx context.Context, in basetypes.Ob
return nil, diags
}
- maxThroughInMbVal, ok := maxThroughInMbAttribute.(basetypes.Int64Value)
+ maxThroughInMbVal, ok := maxThroughInMbAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`max_through_in_mb expected to be basetypes.Int64Value, was: %T`, maxThroughInMbAttribute))
+ fmt.Sprintf(`max_through_in_mb expected to be basetypes.Int32Value, was: %T`, maxThroughInMbAttribute))
}
if diags.HasError() {
@@ -1110,12 +1110,12 @@ func NewStorageClassesValue(attributeTypes map[string]attr.Type, attributes map[
return NewStorageClassesValueUnknown(), diags
}
- maxIoPerSecVal, ok := maxIoPerSecAttribute.(basetypes.Int64Value)
+ maxIoPerSecVal, ok := maxIoPerSecAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`max_io_per_sec expected to be basetypes.Int64Value, was: %T`, maxIoPerSecAttribute))
+ fmt.Sprintf(`max_io_per_sec expected to be basetypes.Int32Value, was: %T`, maxIoPerSecAttribute))
}
maxThroughInMbAttribute, ok := attributes["max_through_in_mb"]
@@ -1128,12 +1128,12 @@ func NewStorageClassesValue(attributeTypes map[string]attr.Type, attributes map[
return NewStorageClassesValueUnknown(), diags
}
- maxThroughInMbVal, ok := maxThroughInMbAttribute.(basetypes.Int64Value)
+ maxThroughInMbVal, ok := maxThroughInMbAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`max_through_in_mb expected to be basetypes.Int64Value, was: %T`, maxThroughInMbAttribute))
+ fmt.Sprintf(`max_through_in_mb expected to be basetypes.Int32Value, was: %T`, maxThroughInMbAttribute))
}
if diags.HasError() {
@@ -1217,8 +1217,8 @@ var _ basetypes.ObjectValuable = StorageClassesValue{}
type StorageClassesValue struct {
Class basetypes.StringValue `tfsdk:"class"`
- MaxIoPerSec basetypes.Int64Value `tfsdk:"max_io_per_sec"`
- MaxThroughInMb basetypes.Int64Value `tfsdk:"max_through_in_mb"`
+ MaxIoPerSec basetypes.Int32Value `tfsdk:"max_io_per_sec"`
+ MaxThroughInMb basetypes.Int32Value `tfsdk:"max_through_in_mb"`
state attr.ValueState
}
@@ -1229,8 +1229,8 @@ func (v StorageClassesValue) ToTerraformValue(ctx context.Context) (tftypes.Valu
var err error
attrTypes["class"] = basetypes.StringType{}.TerraformType(ctx)
- attrTypes["max_io_per_sec"] = basetypes.Int64Type{}.TerraformType(ctx)
- attrTypes["max_through_in_mb"] = basetypes.Int64Type{}.TerraformType(ctx)
+ attrTypes["max_io_per_sec"] = basetypes.Int32Type{}.TerraformType(ctx)
+ attrTypes["max_through_in_mb"] = basetypes.Int32Type{}.TerraformType(ctx)
objectType := tftypes.Object{AttributeTypes: attrTypes}
@@ -1293,8 +1293,8 @@ func (v StorageClassesValue) ToObjectValue(ctx context.Context) (basetypes.Objec
attributeTypes := map[string]attr.Type{
"class": basetypes.StringType{},
- "max_io_per_sec": basetypes.Int64Type{},
- "max_through_in_mb": basetypes.Int64Type{},
+ "max_io_per_sec": basetypes.Int32Type{},
+ "max_through_in_mb": basetypes.Int32Type{},
}
if v.IsNull() {
@@ -1357,8 +1357,8 @@ func (v StorageClassesValue) Type(ctx context.Context) attr.Type {
func (v StorageClassesValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
return map[string]attr.Type{
"class": basetypes.StringType{},
- "max_io_per_sec": basetypes.Int64Type{},
- "max_through_in_mb": basetypes.Int64Type{},
+ "max_io_per_sec": basetypes.Int32Type{},
+ "max_through_in_mb": basetypes.Int32Type{},
}
}
@@ -1397,12 +1397,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
return nil, diags
}
- pageVal, ok := pageAttribute.(basetypes.Int64Value)
+ pageVal, ok := pageAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute))
+ fmt.Sprintf(`page expected to be basetypes.Int32Value, was: %T`, pageAttribute))
}
sizeAttribute, ok := attributes["size"]
@@ -1415,12 +1415,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
return nil, diags
}
- sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
+ sizeVal, ok := sizeAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
+ fmt.Sprintf(`size expected to be basetypes.Int32Value, was: %T`, sizeAttribute))
}
sortAttribute, ok := attributes["sort"]
@@ -1451,12 +1451,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
return nil, diags
}
- totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value)
+ totalPagesVal, ok := totalPagesAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute))
+ fmt.Sprintf(`total_pages expected to be basetypes.Int32Value, was: %T`, totalPagesAttribute))
}
totalRowsAttribute, ok := attributes["total_rows"]
@@ -1469,12 +1469,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
return nil, diags
}
- totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value)
+ totalRowsVal, ok := totalRowsAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute))
+ fmt.Sprintf(`total_rows expected to be basetypes.Int32Value, was: %T`, totalRowsAttribute))
}
if diags.HasError() {
@@ -1564,12 +1564,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
return NewPaginationValueUnknown(), diags
}
- pageVal, ok := pageAttribute.(basetypes.Int64Value)
+ pageVal, ok := pageAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute))
+ fmt.Sprintf(`page expected to be basetypes.Int32Value, was: %T`, pageAttribute))
}
sizeAttribute, ok := attributes["size"]
@@ -1582,12 +1582,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
return NewPaginationValueUnknown(), diags
}
- sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
+ sizeVal, ok := sizeAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
+ fmt.Sprintf(`size expected to be basetypes.Int32Value, was: %T`, sizeAttribute))
}
sortAttribute, ok := attributes["sort"]
@@ -1618,12 +1618,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
return NewPaginationValueUnknown(), diags
}
- totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value)
+ totalPagesVal, ok := totalPagesAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute))
+ fmt.Sprintf(`total_pages expected to be basetypes.Int32Value, was: %T`, totalPagesAttribute))
}
totalRowsAttribute, ok := attributes["total_rows"]
@@ -1636,12 +1636,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
return NewPaginationValueUnknown(), diags
}
- totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value)
+ totalRowsVal, ok := totalRowsAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute))
+ fmt.Sprintf(`total_rows expected to be basetypes.Int32Value, was: %T`, totalRowsAttribute))
}
if diags.HasError() {
@@ -1726,11 +1726,11 @@ func (t PaginationType) ValueType(ctx context.Context) attr.Value {
var _ basetypes.ObjectValuable = PaginationValue{}
type PaginationValue struct {
- Page basetypes.Int64Value `tfsdk:"page"`
- Size basetypes.Int64Value `tfsdk:"size"`
+ Page basetypes.Int32Value `tfsdk:"page"`
+ Size basetypes.Int32Value `tfsdk:"size"`
Sort basetypes.StringValue `tfsdk:"sort"`
- TotalPages basetypes.Int64Value `tfsdk:"total_pages"`
- TotalRows basetypes.Int64Value `tfsdk:"total_rows"`
+ TotalPages basetypes.Int32Value `tfsdk:"total_pages"`
+ TotalRows basetypes.Int32Value `tfsdk:"total_rows"`
state attr.ValueState
}
@@ -1740,11 +1740,11 @@ func (v PaginationValue) ToTerraformValue(ctx context.Context) (tftypes.Value, e
var val tftypes.Value
var err error
- attrTypes["page"] = basetypes.Int64Type{}.TerraformType(ctx)
- attrTypes["size"] = basetypes.Int64Type{}.TerraformType(ctx)
+ attrTypes["page"] = basetypes.Int32Type{}.TerraformType(ctx)
+ attrTypes["size"] = basetypes.Int32Type{}.TerraformType(ctx)
attrTypes["sort"] = basetypes.StringType{}.TerraformType(ctx)
- attrTypes["total_pages"] = basetypes.Int64Type{}.TerraformType(ctx)
- attrTypes["total_rows"] = basetypes.Int64Type{}.TerraformType(ctx)
+ attrTypes["total_pages"] = basetypes.Int32Type{}.TerraformType(ctx)
+ attrTypes["total_rows"] = basetypes.Int32Type{}.TerraformType(ctx)
objectType := tftypes.Object{AttributeTypes: attrTypes}
@@ -1822,11 +1822,11 @@ func (v PaginationValue) ToObjectValue(ctx context.Context) (basetypes.ObjectVal
var diags diag.Diagnostics
attributeTypes := map[string]attr.Type{
- "page": basetypes.Int64Type{},
- "size": basetypes.Int64Type{},
+ "page": basetypes.Int32Type{},
+ "size": basetypes.Int32Type{},
"sort": basetypes.StringType{},
- "total_pages": basetypes.Int64Type{},
- "total_rows": basetypes.Int64Type{},
+ "total_pages": basetypes.Int32Type{},
+ "total_rows": basetypes.Int32Type{},
}
if v.IsNull() {
@@ -1898,10 +1898,10 @@ func (v PaginationValue) Type(ctx context.Context) attr.Type {
func (v PaginationValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
return map[string]attr.Type{
- "page": basetypes.Int64Type{},
- "size": basetypes.Int64Type{},
+ "page": basetypes.Int32Type{},
+ "size": basetypes.Int32Type{},
"sort": basetypes.StringType{},
- "total_pages": basetypes.Int64Type{},
- "total_rows": basetypes.Int64Type{},
+ "total_pages": basetypes.Int32Type{},
+ "total_rows": basetypes.Int32Type{},
}
}
diff --git a/stackit/internal/services/postgresflexalpha/instance/datasource.go b/stackit/internal/services/postgresflexalpha/instance/datasource.go
index 6a4296a4..cd7048e3 100644
--- a/stackit/internal/services/postgresflexalpha/instance/datasource.go
+++ b/stackit/internal/services/postgresflexalpha/instance/datasource.go
@@ -6,8 +6,8 @@ import (
"net/http"
"github.com/hashicorp/terraform-plugin-framework/types"
+ "github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
postgresflexalpha2 "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/instance/datasources_gen"
postgresflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/utils"
@@ -37,7 +37,7 @@ type dataSourceModel struct {
// instanceDataSource is the data source implementation.
type instanceDataSource struct {
- client *postgresflexalpha.APIClient
+ client *v3alpha1api.APIClient
providerData core.ProviderData
}
@@ -96,7 +96,7 @@ func (r *instanceDataSource) Read(
ctx = tflog.SetField(ctx, "project_id", projectId)
ctx = tflog.SetField(ctx, "instance_id", instanceId)
ctx = tflog.SetField(ctx, "region", region)
- instanceResp, err := r.client.GetInstanceRequest(ctx, projectId, region, instanceId).Execute()
+ instanceResp, err := r.client.DefaultAPI.GetInstanceRequest(ctx, projectId, region, instanceId).Execute()
if err != nil {
utils.LogError(
ctx,
diff --git a/stackit/internal/services/postgresflexalpha/instance/datasources_gen/instance_data_source_gen.go b/stackit/internal/services/postgresflexalpha/instance/datasources_gen/instance_data_source_gen.go
index 58f88e01..61a3cc0b 100644
--- a/stackit/internal/services/postgresflexalpha/instance/datasources_gen/instance_data_source_gen.go
+++ b/stackit/internal/services/postgresflexalpha/instance/datasources_gen/instance_data_source_gen.go
@@ -40,7 +40,7 @@ func InstanceDataSourceSchema(ctx context.Context) schema.Schema {
Description: "The host of the instance.",
MarkdownDescription: "The host of the instance.",
},
- "port": schema.Int64Attribute{
+ "port": schema.Int32Attribute{
Computed: true,
Description: "The port of the instance.",
MarkdownDescription: "The port of the instance.",
@@ -164,12 +164,12 @@ func InstanceDataSourceSchema(ctx context.Context) schema.Schema {
),
},
},
- "replicas": schema.Int64Attribute{
+ "replicas": schema.Int32Attribute{
Computed: true,
Description: "How many replicas the instance should have.",
MarkdownDescription: "How many replicas the instance should have.",
},
- "retention_days": schema.Int64Attribute{
+ "retention_days": schema.Int32Attribute{
Computed: true,
Description: "How long backups are retained. The value can only be between 32 and 365 days.",
MarkdownDescription: "How long backups are retained. The value can only be between 32 and 365 days.",
@@ -186,7 +186,7 @@ func InstanceDataSourceSchema(ctx context.Context) schema.Schema {
Description: "The storage class for the storage.",
MarkdownDescription: "The storage class for the storage.",
},
- "size": schema.Int64Attribute{
+ "size": schema.Int32Attribute{
Computed: true,
Description: "The storage size in Gigabytes.",
MarkdownDescription: "The storage size in Gigabytes.",
@@ -223,8 +223,8 @@ type InstanceModel struct {
Network NetworkValue `tfsdk:"network"`
ProjectId types.String `tfsdk:"project_id"`
Region types.String `tfsdk:"region"`
- Replicas types.Int64 `tfsdk:"replicas"`
- RetentionDays types.Int64 `tfsdk:"retention_days"`
+ Replicas types.Int32 `tfsdk:"replicas"`
+ RetentionDays types.Int32 `tfsdk:"retention_days"`
Status types.String `tfsdk:"status"`
Storage StorageValue `tfsdk:"storage"`
Version types.String `tfsdk:"version"`
@@ -634,12 +634,12 @@ func (t WriteType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue
return nil, diags
}
- portVal, ok := portAttribute.(basetypes.Int64Value)
+ portVal, ok := portAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`port expected to be basetypes.Int64Value, was: %T`, portAttribute))
+ fmt.Sprintf(`port expected to be basetypes.Int32Value, was: %T`, portAttribute))
}
if diags.HasError() {
@@ -744,12 +744,12 @@ func NewWriteValue(attributeTypes map[string]attr.Type, attributes map[string]at
return NewWriteValueUnknown(), diags
}
- portVal, ok := portAttribute.(basetypes.Int64Value)
+ portVal, ok := portAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`port expected to be basetypes.Int64Value, was: %T`, portAttribute))
+ fmt.Sprintf(`port expected to be basetypes.Int32Value, was: %T`, portAttribute))
}
if diags.HasError() {
@@ -832,7 +832,7 @@ var _ basetypes.ObjectValuable = WriteValue{}
type WriteValue struct {
Host basetypes.StringValue `tfsdk:"host"`
- Port basetypes.Int64Value `tfsdk:"port"`
+ Port basetypes.Int32Value `tfsdk:"port"`
state attr.ValueState
}
@@ -843,7 +843,7 @@ func (v WriteValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error)
var err error
attrTypes["host"] = basetypes.StringType{}.TerraformType(ctx)
- attrTypes["port"] = basetypes.Int64Type{}.TerraformType(ctx)
+ attrTypes["port"] = basetypes.Int32Type{}.TerraformType(ctx)
objectType := tftypes.Object{AttributeTypes: attrTypes}
@@ -898,7 +898,7 @@ func (v WriteValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, d
attributeTypes := map[string]attr.Type{
"host": basetypes.StringType{},
- "port": basetypes.Int64Type{},
+ "port": basetypes.Int32Type{},
}
if v.IsNull() {
@@ -956,7 +956,7 @@ func (v WriteValue) Type(ctx context.Context) attr.Type {
func (v WriteValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
return map[string]attr.Type{
"host": basetypes.StringType{},
- "port": basetypes.Int64Type{},
+ "port": basetypes.Int32Type{},
}
}
@@ -2020,12 +2020,12 @@ func (t StorageType) ValueFromObject(ctx context.Context, in basetypes.ObjectVal
return nil, diags
}
- sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
+ sizeVal, ok := sizeAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
+ fmt.Sprintf(`size expected to be basetypes.Int32Value, was: %T`, sizeAttribute))
}
if diags.HasError() {
@@ -2130,12 +2130,12 @@ func NewStorageValue(attributeTypes map[string]attr.Type, attributes map[string]
return NewStorageValueUnknown(), diags
}
- sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
+ sizeVal, ok := sizeAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
+ fmt.Sprintf(`size expected to be basetypes.Int32Value, was: %T`, sizeAttribute))
}
if diags.HasError() {
@@ -2218,7 +2218,7 @@ var _ basetypes.ObjectValuable = StorageValue{}
type StorageValue struct {
PerformanceClass basetypes.StringValue `tfsdk:"performance_class"`
- Size basetypes.Int64Value `tfsdk:"size"`
+ Size basetypes.Int32Value `tfsdk:"size"`
state attr.ValueState
}
@@ -2229,7 +2229,7 @@ func (v StorageValue) ToTerraformValue(ctx context.Context) (tftypes.Value, erro
var err error
attrTypes["performance_class"] = basetypes.StringType{}.TerraformType(ctx)
- attrTypes["size"] = basetypes.Int64Type{}.TerraformType(ctx)
+ attrTypes["size"] = basetypes.Int32Type{}.TerraformType(ctx)
objectType := tftypes.Object{AttributeTypes: attrTypes}
@@ -2284,7 +2284,7 @@ func (v StorageValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue,
attributeTypes := map[string]attr.Type{
"performance_class": basetypes.StringType{},
- "size": basetypes.Int64Type{},
+ "size": basetypes.Int32Type{},
}
if v.IsNull() {
@@ -2342,6 +2342,6 @@ func (v StorageValue) Type(ctx context.Context) attr.Type {
func (v StorageValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
return map[string]attr.Type{
"performance_class": basetypes.StringType{},
- "size": basetypes.Int64Type{},
+ "size": basetypes.Int32Type{},
}
}
diff --git a/stackit/internal/services/postgresflexalpha/instance/datasources_gen/instances_data_source_gen.go b/stackit/internal/services/postgresflexalpha/instance/datasources_gen/instances_data_source_gen.go
index 0407c13f..a8138d79 100644
--- a/stackit/internal/services/postgresflexalpha/instance/datasources_gen/instances_data_source_gen.go
+++ b/stackit/internal/services/postgresflexalpha/instance/datasources_gen/instances_data_source_gen.go
@@ -54,7 +54,7 @@ func InstancesDataSourceSchema(ctx context.Context) schema.Schema {
Description: "List of owned instances and their current status.",
MarkdownDescription: "List of owned instances and their current status.",
},
- "page": schema.Int64Attribute{
+ "page": schema.Int32Attribute{
Optional: true,
Computed: true,
Description: "Number of the page of items list to be returned.",
@@ -62,19 +62,19 @@ func InstancesDataSourceSchema(ctx context.Context) schema.Schema {
},
"pagination": schema.SingleNestedAttribute{
Attributes: map[string]schema.Attribute{
- "page": schema.Int64Attribute{
+ "page": schema.Int32Attribute{
Computed: true,
},
- "size": schema.Int64Attribute{
+ "size": schema.Int32Attribute{
Computed: true,
},
"sort": schema.StringAttribute{
Computed: true,
},
- "total_pages": schema.Int64Attribute{
+ "total_pages": schema.Int32Attribute{
Computed: true,
},
- "total_rows": schema.Int64Attribute{
+ "total_rows": schema.Int32Attribute{
Computed: true,
},
},
@@ -100,7 +100,7 @@ func InstancesDataSourceSchema(ctx context.Context) schema.Schema {
),
},
},
- "size": schema.Int64Attribute{
+ "size": schema.Int32Attribute{
Optional: true,
Computed: true,
Description: "Number of items to be returned on each page.",
@@ -130,11 +130,11 @@ func InstancesDataSourceSchema(ctx context.Context) schema.Schema {
type InstancesModel struct {
Instances types.List `tfsdk:"instances"`
- Page types.Int64 `tfsdk:"page"`
+ Page types.Int32 `tfsdk:"page"`
Pagination PaginationValue `tfsdk:"pagination"`
ProjectId types.String `tfsdk:"project_id"`
Region types.String `tfsdk:"region"`
- Size types.Int64 `tfsdk:"size"`
+ Size types.Int32 `tfsdk:"size"`
Sort types.String `tfsdk:"sort"`
}
@@ -662,12 +662,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
return nil, diags
}
- pageVal, ok := pageAttribute.(basetypes.Int64Value)
+ pageVal, ok := pageAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute))
+ fmt.Sprintf(`page expected to be basetypes.Int32Value, was: %T`, pageAttribute))
}
sizeAttribute, ok := attributes["size"]
@@ -680,12 +680,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
return nil, diags
}
- sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
+ sizeVal, ok := sizeAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
+ fmt.Sprintf(`size expected to be basetypes.Int32Value, was: %T`, sizeAttribute))
}
sortAttribute, ok := attributes["sort"]
@@ -716,12 +716,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
return nil, diags
}
- totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value)
+ totalPagesVal, ok := totalPagesAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute))
+ fmt.Sprintf(`total_pages expected to be basetypes.Int32Value, was: %T`, totalPagesAttribute))
}
totalRowsAttribute, ok := attributes["total_rows"]
@@ -734,12 +734,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
return nil, diags
}
- totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value)
+ totalRowsVal, ok := totalRowsAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute))
+ fmt.Sprintf(`total_rows expected to be basetypes.Int32Value, was: %T`, totalRowsAttribute))
}
if diags.HasError() {
@@ -829,12 +829,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
return NewPaginationValueUnknown(), diags
}
- pageVal, ok := pageAttribute.(basetypes.Int64Value)
+ pageVal, ok := pageAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute))
+ fmt.Sprintf(`page expected to be basetypes.Int32Value, was: %T`, pageAttribute))
}
sizeAttribute, ok := attributes["size"]
@@ -847,12 +847,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
return NewPaginationValueUnknown(), diags
}
- sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
+ sizeVal, ok := sizeAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
+ fmt.Sprintf(`size expected to be basetypes.Int32Value, was: %T`, sizeAttribute))
}
sortAttribute, ok := attributes["sort"]
@@ -883,12 +883,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
return NewPaginationValueUnknown(), diags
}
- totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value)
+ totalPagesVal, ok := totalPagesAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute))
+ fmt.Sprintf(`total_pages expected to be basetypes.Int32Value, was: %T`, totalPagesAttribute))
}
totalRowsAttribute, ok := attributes["total_rows"]
@@ -901,12 +901,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
return NewPaginationValueUnknown(), diags
}
- totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value)
+ totalRowsVal, ok := totalRowsAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute))
+ fmt.Sprintf(`total_rows expected to be basetypes.Int32Value, was: %T`, totalRowsAttribute))
}
if diags.HasError() {
@@ -991,11 +991,11 @@ func (t PaginationType) ValueType(ctx context.Context) attr.Value {
var _ basetypes.ObjectValuable = PaginationValue{}
type PaginationValue struct {
- Page basetypes.Int64Value `tfsdk:"page"`
- Size basetypes.Int64Value `tfsdk:"size"`
+ Page basetypes.Int32Value `tfsdk:"page"`
+ Size basetypes.Int32Value `tfsdk:"size"`
Sort basetypes.StringValue `tfsdk:"sort"`
- TotalPages basetypes.Int64Value `tfsdk:"total_pages"`
- TotalRows basetypes.Int64Value `tfsdk:"total_rows"`
+ TotalPages basetypes.Int32Value `tfsdk:"total_pages"`
+ TotalRows basetypes.Int32Value `tfsdk:"total_rows"`
state attr.ValueState
}
@@ -1005,11 +1005,11 @@ func (v PaginationValue) ToTerraformValue(ctx context.Context) (tftypes.Value, e
var val tftypes.Value
var err error
- attrTypes["page"] = basetypes.Int64Type{}.TerraformType(ctx)
- attrTypes["size"] = basetypes.Int64Type{}.TerraformType(ctx)
+ attrTypes["page"] = basetypes.Int32Type{}.TerraformType(ctx)
+ attrTypes["size"] = basetypes.Int32Type{}.TerraformType(ctx)
attrTypes["sort"] = basetypes.StringType{}.TerraformType(ctx)
- attrTypes["total_pages"] = basetypes.Int64Type{}.TerraformType(ctx)
- attrTypes["total_rows"] = basetypes.Int64Type{}.TerraformType(ctx)
+ attrTypes["total_pages"] = basetypes.Int32Type{}.TerraformType(ctx)
+ attrTypes["total_rows"] = basetypes.Int32Type{}.TerraformType(ctx)
objectType := tftypes.Object{AttributeTypes: attrTypes}
@@ -1087,11 +1087,11 @@ func (v PaginationValue) ToObjectValue(ctx context.Context) (basetypes.ObjectVal
var diags diag.Diagnostics
attributeTypes := map[string]attr.Type{
- "page": basetypes.Int64Type{},
- "size": basetypes.Int64Type{},
+ "page": basetypes.Int32Type{},
+ "size": basetypes.Int32Type{},
"sort": basetypes.StringType{},
- "total_pages": basetypes.Int64Type{},
- "total_rows": basetypes.Int64Type{},
+ "total_pages": basetypes.Int32Type{},
+ "total_rows": basetypes.Int32Type{},
}
if v.IsNull() {
@@ -1163,10 +1163,10 @@ func (v PaginationValue) Type(ctx context.Context) attr.Type {
func (v PaginationValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
return map[string]attr.Type{
- "page": basetypes.Int64Type{},
- "size": basetypes.Int64Type{},
+ "page": basetypes.Int32Type{},
+ "size": basetypes.Int32Type{},
"sort": basetypes.StringType{},
- "total_pages": basetypes.Int64Type{},
- "total_rows": basetypes.Int64Type{},
+ "total_pages": basetypes.Int32Type{},
+ "total_rows": basetypes.Int32Type{},
}
}
diff --git a/stackit/internal/services/postgresflexalpha/instance/functions.go b/stackit/internal/services/postgresflexalpha/instance/functions.go
index 28a567d2..5f194ff4 100644
--- a/stackit/internal/services/postgresflexalpha/instance/functions.go
+++ b/stackit/internal/services/postgresflexalpha/instance/functions.go
@@ -7,8 +7,8 @@ import (
"github.com/hashicorp/terraform-plugin-framework/attr"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
+ postgresflex "github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
- postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
postgresflexalphadatasource "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/instance/datasources_gen"
postgresflexalpharesource "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/instance/resources_gen"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
@@ -33,9 +33,7 @@ func mapGetInstanceResponseToModel(
)
}
- isConnectionInfoIncomplete := resp.ConnectionInfo == nil || resp.ConnectionInfo.Write == nil ||
- resp.ConnectionInfo.Write.Host == nil || *resp.ConnectionInfo.Write.Host == "" ||
- resp.ConnectionInfo.Write.Port == nil || *resp.ConnectionInfo.Write.Port == 0
+ isConnectionInfoIncomplete := resp.ConnectionInfo.Write.Host == "" || resp.ConnectionInfo.Write.Port == 0
if isConnectionInfoIncomplete {
m.ConnectionInfo = postgresflexalpharesource.NewConnectionInfoValueNull()
@@ -46,8 +44,8 @@ func mapGetInstanceResponseToModel(
"write": postgresflexalpharesource.NewWriteValueMust(
postgresflexalpharesource.WriteValue{}.AttributeTypes(ctx),
map[string]attr.Value{
- "host": types.StringPointerValue(resp.ConnectionInfo.Write.Host),
- "port": types.Int64PointerValue(resp.ConnectionInfo.Write.Port),
+ "host": types.StringValue(resp.ConnectionInfo.Write.Host),
+ "port": types.Int32Value(resp.ConnectionInfo.Write.Port),
},
),
},
@@ -62,7 +60,7 @@ func mapGetInstanceResponseToModel(
m.InstanceId.ValueString(),
)
}
- m.InstanceId = types.StringPointerValue(resp.Id)
+ m.InstanceId = types.StringValue(resp.Id)
m.IsDeletable = types.BoolValue(resp.GetIsDeletable())
@@ -75,12 +73,12 @@ func mapGetInstanceResponseToModel(
netInstAdd := types.StringValue("")
if instAdd, ok := resp.Network.GetInstanceAddressOk(); ok {
- netInstAdd = types.StringValue(instAdd)
+ netInstAdd = types.StringValue(*instAdd)
}
netRtrAdd := types.StringValue("")
if rtrAdd, ok := resp.Network.GetRouterAddressOk(); ok {
- netRtrAdd = types.StringValue(rtrAdd)
+ netRtrAdd = types.StringValue(*rtrAdd)
}
net, diags := postgresflexalpharesource.NewNetworkValue(
@@ -97,8 +95,8 @@ func mapGetInstanceResponseToModel(
}
m.Network = net
- m.Replicas = types.Int64Value(int64(resp.GetReplicas()))
- m.RetentionDays = types.Int64Value(resp.GetRetentionDays())
+ m.Replicas = types.Int32Value(int32(resp.GetReplicas()))
+ m.RetentionDays = types.Int32Value(resp.GetRetentionDays())
m.Name = types.StringValue(resp.GetName())
@@ -108,7 +106,7 @@ func mapGetInstanceResponseToModel(
postgresflexalpharesource.StorageValue{}.AttributeTypes(ctx),
map[string]attr.Value{
"performance_class": types.StringValue(resp.Storage.GetPerformanceClass()),
- "size": types.Int64Value(resp.Storage.GetSize()),
+ "size": types.Int32Value(resp.Storage.GetSize()),
},
)
if diags.HasError() {
@@ -131,7 +129,7 @@ func mapGetDataInstanceResponseToModel(
m.FlavorId = types.StringValue(resp.GetFlavorId())
m.Id = utils.BuildInternalTerraformId(m.ProjectId.ValueString(), m.Region.ValueString(), m.InstanceId.ValueString())
- m.InstanceId = types.StringPointerValue(resp.Id)
+ m.InstanceId = types.StringValue(resp.Id)
m.IsDeletable = types.BoolValue(resp.GetIsDeletable())
m.Name = types.StringValue(resp.GetName())
@@ -140,14 +138,14 @@ func mapGetDataInstanceResponseToModel(
return err
}
- m.Replicas = types.Int64Value(int64(resp.GetReplicas()))
- m.RetentionDays = types.Int64Value(resp.GetRetentionDays())
+ m.Replicas = types.Int32Value(int32(resp.GetReplicas()))
+ m.RetentionDays = types.Int32Value(resp.GetRetentionDays())
m.Status = types.StringValue(string(resp.GetStatus()))
storage, diags := postgresflexalphadatasource.NewStorageValue(
postgresflexalphadatasource.StorageValue{}.AttributeTypes(ctx),
map[string]attr.Value{
"performance_class": types.StringValue(resp.Storage.GetPerformanceClass()),
- "size": types.Int64Value(resp.Storage.GetSize()),
+ "size": types.Int32Value(resp.Storage.GetSize()),
},
)
if diags.HasError() {
@@ -159,9 +157,7 @@ func mapGetDataInstanceResponseToModel(
}
func handleConnectionInfo(ctx context.Context, m *dataSourceModel, resp *postgresflex.GetInstanceResponse) {
- isConnectionInfoIncomplete := resp.ConnectionInfo == nil || resp.ConnectionInfo.Write == nil ||
- resp.ConnectionInfo.Write.Host == nil || *resp.ConnectionInfo.Write.Host == "" ||
- resp.ConnectionInfo.Write.Port == nil || *resp.ConnectionInfo.Write.Port == 0
+ isConnectionInfoIncomplete := resp.ConnectionInfo.Write.Host == "" || resp.ConnectionInfo.Write.Port == 0
if isConnectionInfoIncomplete {
m.ConnectionInfo = postgresflexalphadatasource.NewConnectionInfoValueNull()
@@ -172,8 +168,8 @@ func handleConnectionInfo(ctx context.Context, m *dataSourceModel, resp *postgre
"write": types.ObjectValueMust(
postgresflexalphadatasource.WriteValue{}.AttributeTypes(ctx),
map[string]attr.Value{
- "host": types.StringPointerValue(resp.ConnectionInfo.Write.Host),
- "port": types.Int64PointerValue(resp.ConnectionInfo.Write.Port),
+ "host": types.StringValue(resp.ConnectionInfo.Write.Host),
+ "port": types.Int32Value(resp.ConnectionInfo.Write.Port),
},
),
},
@@ -189,12 +185,12 @@ func handleNetwork(ctx context.Context, m *dataSourceModel, resp *postgresflex.G
instAddr := ""
if iA, ok := resp.Network.GetInstanceAddressOk(); ok {
- instAddr = iA
+ instAddr = *iA
}
rtrAddr := ""
if rA, ok := resp.Network.GetRouterAddressOk(); ok {
- rtrAddr = rA
+ rtrAddr = *rA
}
net, diags := postgresflexalphadatasource.NewNetworkValue(
@@ -216,22 +212,22 @@ func handleNetwork(ctx context.Context, m *dataSourceModel, resp *postgresflex.G
func handleEncryption(m *dataSourceModel, resp *postgresflex.GetInstanceResponse) {
keyId := ""
if keyIdVal, ok := resp.Encryption.GetKekKeyIdOk(); ok {
- keyId = keyIdVal
+ keyId = *keyIdVal
}
keyRingId := ""
if keyRingIdVal, ok := resp.Encryption.GetKekKeyRingIdOk(); ok {
- keyRingId = keyRingIdVal
+ keyRingId = *keyRingIdVal
}
keyVersion := ""
if keyVersionVal, ok := resp.Encryption.GetKekKeyVersionOk(); ok {
- keyVersion = keyVersionVal
+ keyVersion = *keyVersionVal
}
svcAcc := ""
if svcAccVal, ok := resp.Encryption.GetServiceAccountOk(); ok {
- svcAcc = svcAccVal
+ svcAcc = *svcAccVal
}
m.Encryption = postgresflexalphadatasource.EncryptionValue{
diff --git a/stackit/internal/services/postgresflexalpha/instance/functions_test.go b/stackit/internal/services/postgresflexalpha/instance/functions_test.go
index 406e577f..154d1bd4 100644
--- a/stackit/internal/services/postgresflexalpha/instance/functions_test.go
+++ b/stackit/internal/services/postgresflexalpha/instance/functions_test.go
@@ -5,9 +5,9 @@ import (
"testing"
"github.com/hashicorp/terraform-plugin-framework/types"
- "github.com/stackitcloud/stackit-sdk-go/core/utils"
- postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
+ postgresflex "github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
+
postgresflexalpharesource "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/instance/resources_gen"
utils2 "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
)
@@ -17,7 +17,7 @@ func Test_handleConnectionInfo(t *testing.T) {
ctx context.Context
m *dataSourceModel
hostName string
- port int64
+ port int32
}
tests := []struct {
name string
@@ -63,10 +63,10 @@ func Test_handleConnectionInfo(t *testing.T) {
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
resp := &postgresflex.GetInstanceResponse{
- ConnectionInfo: &postgresflex.InstanceConnectionInfo{
- Write: &postgresflex.InstanceConnectionInfoWrite{
- Host: utils.Ptr(tt.args.hostName),
- Port: utils.Ptr(tt.args.port),
+ ConnectionInfo: postgresflex.InstanceConnectionInfo{
+ Write: postgresflex.InstanceConnectionInfoWrite{
+ Host: tt.args.hostName,
+ Port: tt.args.port,
},
},
}
@@ -93,7 +93,7 @@ func Test_handleConnectionInfo(t *testing.T) {
if !ok {
t.Errorf("could not find a value for port in connection_info.write")
}
- if !gotPort.Equal(types.Int64Value(tt.args.port)) {
+ if !gotPort.Equal(types.Int32Value(tt.args.port)) {
t.Errorf("port value incorrect: want: %d - got: %s", tt.args.port, gotPort.String())
}
}
diff --git a/stackit/internal/services/postgresflexalpha/instance/resource.go b/stackit/internal/services/postgresflexalpha/instance/resource.go
index d4049fb5..3d3b8930 100644
--- a/stackit/internal/services/postgresflexalpha/instance/resource.go
+++ b/stackit/internal/services/postgresflexalpha/instance/resource.go
@@ -4,7 +4,6 @@ import (
"context"
_ "embed"
"fmt"
- "math"
"net/http"
"strings"
@@ -14,8 +13,8 @@ import (
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-log/tflog"
"github.com/stackitcloud/stackit-sdk-go/core/oapierror"
+ "github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
- postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
postgresflexalpha "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/instance/resources_gen"
@@ -50,7 +49,7 @@ type InstanceResourceIdentityModel struct {
// instanceResource is the resource implementation.
type instanceResource struct {
- client *postgresflex.APIClient
+ client *v3alpha1api.APIClient
providerData core.ProviderData
}
@@ -207,15 +206,11 @@ func (r *instanceResource) Create(
return
}
- if model.Replicas.ValueInt64() > math.MaxInt32 {
- resp.Diagnostics.AddError("invalid int32 value", "provided int64 value does not fit into int32")
- return
- }
- replVal := int32(model.Replicas.ValueInt64()) // nolint:gosec // check is performed above
+ replVal := model.Replicas.ValueInt32()
payload := modelToCreateInstancePayload(netAcl, model, replVal)
// Create new instance
- createResp, err := r.client.CreateInstanceRequest(
+ createResp, err := r.client.DefaultAPI.CreateInstanceRequest(
ctx,
projectId,
region,
@@ -236,14 +231,14 @@ func (r *instanceResource) Create(
identity := InstanceResourceIdentityModel{
ProjectID: types.StringValue(projectId),
Region: types.StringValue(region),
- InstanceID: types.StringValue(instanceId),
+ InstanceID: types.StringPointerValue(instanceId),
}
resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
if resp.Diagnostics.HasError() {
return
}
- waitResp, err := wait.CreateInstanceWaitHandler(ctx, r.client, projectId, region, instanceId).
+ waitResp, err := wait.CreateInstanceWaitHandler(ctx, r.client.DefaultAPI, projectId, region, *instanceId).
WaitWithContext(ctx)
if err != nil {
core.LogAndAddError(
@@ -279,34 +274,32 @@ func modelToCreateInstancePayload(
netAcl []string,
model postgresflexalpha.InstanceModel,
replVal int32,
-) postgresflex.CreateInstanceRequestPayload {
- var enc *postgresflex.InstanceEncryption
+) v3alpha1api.CreateInstanceRequestPayload {
+ var enc *v3alpha1api.InstanceEncryption
if !model.Encryption.IsNull() && !model.Encryption.IsUnknown() {
- enc = &postgresflex.InstanceEncryption{
- KekKeyId: model.Encryption.KekKeyId.ValueStringPointer(),
- KekKeyRingId: model.Encryption.KekKeyRingId.ValueStringPointer(),
- KekKeyVersion: model.Encryption.KekKeyVersion.ValueStringPointer(),
- ServiceAccount: model.Encryption.ServiceAccount.ValueStringPointer(),
+ enc = &v3alpha1api.InstanceEncryption{
+ KekKeyId: model.Encryption.KekKeyId.ValueString(),
+ KekKeyRingId: model.Encryption.KekKeyRingId.ValueString(),
+ KekKeyVersion: model.Encryption.KekKeyVersion.ValueString(),
+ ServiceAccount: model.Encryption.ServiceAccount.ValueString(),
}
}
- payload := postgresflex.CreateInstanceRequestPayload{
- BackupSchedule: model.BackupSchedule.ValueStringPointer(),
+ payload := v3alpha1api.CreateInstanceRequestPayload{
+ BackupSchedule: model.BackupSchedule.ValueString(),
Encryption: enc,
- FlavorId: model.FlavorId.ValueStringPointer(),
- Name: model.Name.ValueStringPointer(),
- Network: &postgresflex.InstanceNetworkCreate{
- AccessScope: postgresflex.InstanceNetworkGetAccessScopeAttributeType(
- model.Network.AccessScope.ValueStringPointer(),
- ),
- Acl: &netAcl,
+ FlavorId: model.FlavorId.ValueString(),
+ Name: model.Name.ValueString(),
+ Network: v3alpha1api.InstanceNetworkCreate{
+ AccessScope: (*v3alpha1api.InstanceNetworkAccessScope)(model.Network.AccessScope.ValueStringPointer()),
+ Acl: netAcl,
},
- Replicas: postgresflex.CreateInstanceRequestPayloadGetReplicasAttributeType(&replVal),
- RetentionDays: model.RetentionDays.ValueInt64Pointer(),
- Storage: &postgresflex.StorageCreate{
- PerformanceClass: model.Storage.PerformanceClass.ValueStringPointer(),
- Size: model.Storage.Size.ValueInt64Pointer(),
+ Replicas: v3alpha1api.Replicas(replVal),
+ RetentionDays: model.RetentionDays.ValueInt32(),
+ Storage: v3alpha1api.StorageCreate{
+ PerformanceClass: model.Storage.PerformanceClass.ValueString(),
+ Size: model.Storage.Size.ValueInt32(),
},
- Version: model.Version.ValueStringPointer(),
+ Version: model.Version.ValueString(),
}
return payload
}
@@ -347,7 +340,7 @@ func (r *instanceResource) Read(
ctx = tflog.SetField(ctx, "instance_id", instanceId)
ctx = tflog.SetField(ctx, "region", region)
- instanceResp, err := r.client.GetInstanceRequest(ctx, projectId, region, instanceId).Execute()
+ instanceResp, err := r.client.DefaultAPI.GetInstanceRequest(ctx, projectId, region, instanceId).Execute()
if err != nil {
oapiErr, ok := err.(*oapierror.GenericOpenAPIError) //nolint:errorlint //complaining that error.As should be used to catch wrapped errors, but this error should not be wrapped
if ok && oapiErr.StatusCode == http.StatusNotFound {
@@ -366,7 +359,7 @@ func (r *instanceResource) Read(
return
}
if !model.InstanceId.IsUnknown() && !model.InstanceId.IsNull() {
- if respInstanceID != instanceId {
+ if *respInstanceID != instanceId {
core.LogAndAddError(
ctx,
&resp.Diagnostics,
@@ -445,29 +438,24 @@ func (r *instanceResource) Update(
return
}
- if model.Replicas.ValueInt64() > math.MaxInt32 {
- resp.Diagnostics.AddError("invalid int32 value", "provided int64 value does not fit into int32")
- return
- }
- replInt32 := int32(model.Replicas.ValueInt64()) // nolint:gosec // check is performed above
-
- payload := postgresflex.UpdateInstanceRequestPayload{
- BackupSchedule: model.BackupSchedule.ValueStringPointer(),
- FlavorId: model.FlavorId.ValueStringPointer(),
- Name: model.Name.ValueStringPointer(),
- Network: &postgresflex.InstanceNetworkUpdate{
- Acl: &netAcl,
+ replInt32 := model.Replicas.ValueInt32()
+ payload := v3alpha1api.UpdateInstanceRequestPayload{
+ BackupSchedule: model.BackupSchedule.ValueString(),
+ FlavorId: model.FlavorId.ValueString(),
+ Name: model.Name.ValueString(),
+ Network: v3alpha1api.InstanceNetworkUpdate{
+ Acl: netAcl,
},
- Replicas: postgresflex.UpdateInstanceRequestPayloadGetReplicasAttributeType(&replInt32),
- RetentionDays: model.RetentionDays.ValueInt64Pointer(),
- Storage: &postgresflex.StorageUpdate{
- Size: model.Storage.Size.ValueInt64Pointer(),
+ Replicas: v3alpha1api.Replicas(replInt32),
+ RetentionDays: model.RetentionDays.ValueInt32(),
+ Storage: v3alpha1api.StorageUpdate{
+ Size: model.Storage.Size.ValueInt32Pointer(),
},
- Version: model.Version.ValueStringPointer(),
+ Version: model.Version.ValueString(),
}
// Update existing instance
- err := r.client.UpdateInstanceRequest(
+ err := r.client.DefaultAPI.UpdateInstanceRequest(
ctx,
projectId,
region,
@@ -482,7 +470,7 @@ func (r *instanceResource) Update(
waitResp, err := wait.PartialUpdateInstanceWaitHandler(
ctx,
- r.client,
+ r.client.DefaultAPI,
projectId,
region,
instanceId,
@@ -540,7 +528,7 @@ func (r *instanceResource) Delete(
ctx = tflog.SetField(ctx, "region", region)
// Delete existing instance
- err := r.client.DeleteInstanceRequest(ctx, projectId, region, instanceId).Execute()
+ err := r.client.DefaultAPI.DeleteInstanceRequest(ctx, projectId, region, instanceId).Execute()
if err != nil {
core.LogAndAddError(ctx, &resp.Diagnostics, "Error deleting instance", fmt.Sprintf("Calling API: %v", err))
return
@@ -548,7 +536,7 @@ func (r *instanceResource) Delete(
ctx = core.LogResponse(ctx)
- _, err = r.client.GetInstanceRequest(ctx, projectId, region, instanceId).Execute()
+ _, err = r.client.DefaultAPI.GetInstanceRequest(ctx, projectId, region, instanceId).Execute()
if err != nil {
oapiErr, ok := err.(*oapierror.GenericOpenAPIError) //nolint:errorlint //complaining that error.As should be used to catch wrapped errors, but this error should not be wrapped
if ok && oapiErr.StatusCode != http.StatusNotFound {
diff --git a/stackit/internal/services/postgresflexalpha/instance/resources_gen/instance_resource_gen.go b/stackit/internal/services/postgresflexalpha/instance/resources_gen/instance_resource_gen.go
index 7d7969a6..353d2dbd 100644
--- a/stackit/internal/services/postgresflexalpha/instance/resources_gen/instance_resource_gen.go
+++ b/stackit/internal/services/postgresflexalpha/instance/resources_gen/instance_resource_gen.go
@@ -5,7 +5,7 @@ package postgresflexalpha
import (
"context"
"fmt"
- "github.com/hashicorp/terraform-plugin-framework-validators/int64validator"
+ "github.com/hashicorp/terraform-plugin-framework-validators/int32validator"
"github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/attr"
"github.com/hashicorp/terraform-plugin-framework/diag"
@@ -42,7 +42,7 @@ func InstanceResourceSchema(ctx context.Context) schema.Schema {
Description: "The host of the instance.",
MarkdownDescription: "The host of the instance.",
},
- "port": schema.Int64Attribute{
+ "port": schema.Int32Attribute{
Computed: true,
Description: "The port of the instance.",
MarkdownDescription: "The port of the instance.",
@@ -178,18 +178,18 @@ func InstanceResourceSchema(ctx context.Context) schema.Schema {
),
},
},
- "replicas": schema.Int64Attribute{
+ "replicas": schema.Int32Attribute{
Required: true,
Description: "How many replicas the instance should have.",
MarkdownDescription: "How many replicas the instance should have.",
- Validators: []validator.Int64{
- int64validator.OneOf(
+ Validators: []validator.Int32{
+ int32validator.OneOf(
1,
3,
),
},
},
- "retention_days": schema.Int64Attribute{
+ "retention_days": schema.Int32Attribute{
Required: true,
Description: "How long backups are retained. The value can only be between 32 and 365 days.",
MarkdownDescription: "How long backups are retained. The value can only be between 32 and 365 days.",
@@ -206,7 +206,7 @@ func InstanceResourceSchema(ctx context.Context) schema.Schema {
Description: "The storage class for the storage.",
MarkdownDescription: "The storage class for the storage.",
},
- "size": schema.Int64Attribute{
+ "size": schema.Int32Attribute{
Required: true,
Description: "The storage size in Gigabytes.",
MarkdownDescription: "The storage size in Gigabytes.",
@@ -243,8 +243,8 @@ type InstanceModel struct {
Network NetworkValue `tfsdk:"network"`
ProjectId types.String `tfsdk:"project_id"`
Region types.String `tfsdk:"region"`
- Replicas types.Int64 `tfsdk:"replicas"`
- RetentionDays types.Int64 `tfsdk:"retention_days"`
+ Replicas types.Int32 `tfsdk:"replicas"`
+ RetentionDays types.Int32 `tfsdk:"retention_days"`
Status types.String `tfsdk:"status"`
Storage StorageValue `tfsdk:"storage"`
Version types.String `tfsdk:"version"`
@@ -654,12 +654,12 @@ func (t WriteType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue
return nil, diags
}
- portVal, ok := portAttribute.(basetypes.Int64Value)
+ portVal, ok := portAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`port expected to be basetypes.Int64Value, was: %T`, portAttribute))
+ fmt.Sprintf(`port expected to be basetypes.Int32Value, was: %T`, portAttribute))
}
if diags.HasError() {
@@ -764,12 +764,12 @@ func NewWriteValue(attributeTypes map[string]attr.Type, attributes map[string]at
return NewWriteValueUnknown(), diags
}
- portVal, ok := portAttribute.(basetypes.Int64Value)
+ portVal, ok := portAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`port expected to be basetypes.Int64Value, was: %T`, portAttribute))
+ fmt.Sprintf(`port expected to be basetypes.Int32Value, was: %T`, portAttribute))
}
if diags.HasError() {
@@ -852,7 +852,7 @@ var _ basetypes.ObjectValuable = WriteValue{}
type WriteValue struct {
Host basetypes.StringValue `tfsdk:"host"`
- Port basetypes.Int64Value `tfsdk:"port"`
+ Port basetypes.Int32Value `tfsdk:"port"`
state attr.ValueState
}
@@ -863,7 +863,7 @@ func (v WriteValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error)
var err error
attrTypes["host"] = basetypes.StringType{}.TerraformType(ctx)
- attrTypes["port"] = basetypes.Int64Type{}.TerraformType(ctx)
+ attrTypes["port"] = basetypes.Int32Type{}.TerraformType(ctx)
objectType := tftypes.Object{AttributeTypes: attrTypes}
@@ -918,7 +918,7 @@ func (v WriteValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, d
attributeTypes := map[string]attr.Type{
"host": basetypes.StringType{},
- "port": basetypes.Int64Type{},
+ "port": basetypes.Int32Type{},
}
if v.IsNull() {
@@ -976,7 +976,7 @@ func (v WriteValue) Type(ctx context.Context) attr.Type {
func (v WriteValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
return map[string]attr.Type{
"host": basetypes.StringType{},
- "port": basetypes.Int64Type{},
+ "port": basetypes.Int32Type{},
}
}
@@ -2040,12 +2040,12 @@ func (t StorageType) ValueFromObject(ctx context.Context, in basetypes.ObjectVal
return nil, diags
}
- sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
+ sizeVal, ok := sizeAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
+ fmt.Sprintf(`size expected to be basetypes.Int32Value, was: %T`, sizeAttribute))
}
if diags.HasError() {
@@ -2150,12 +2150,12 @@ func NewStorageValue(attributeTypes map[string]attr.Type, attributes map[string]
return NewStorageValueUnknown(), diags
}
- sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
+ sizeVal, ok := sizeAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
+ fmt.Sprintf(`size expected to be basetypes.Int32Value, was: %T`, sizeAttribute))
}
if diags.HasError() {
@@ -2238,7 +2238,7 @@ var _ basetypes.ObjectValuable = StorageValue{}
type StorageValue struct {
PerformanceClass basetypes.StringValue `tfsdk:"performance_class"`
- Size basetypes.Int64Value `tfsdk:"size"`
+ Size basetypes.Int32Value `tfsdk:"size"`
state attr.ValueState
}
@@ -2249,7 +2249,7 @@ func (v StorageValue) ToTerraformValue(ctx context.Context) (tftypes.Value, erro
var err error
attrTypes["performance_class"] = basetypes.StringType{}.TerraformType(ctx)
- attrTypes["size"] = basetypes.Int64Type{}.TerraformType(ctx)
+ attrTypes["size"] = basetypes.Int32Type{}.TerraformType(ctx)
objectType := tftypes.Object{AttributeTypes: attrTypes}
@@ -2304,7 +2304,7 @@ func (v StorageValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue,
attributeTypes := map[string]attr.Type{
"performance_class": basetypes.StringType{},
- "size": basetypes.Int64Type{},
+ "size": basetypes.Int32Type{},
}
if v.IsNull() {
@@ -2362,6 +2362,6 @@ func (v StorageValue) Type(ctx context.Context) attr.Type {
func (v StorageValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
return map[string]attr.Type{
"performance_class": basetypes.StringType{},
- "size": basetypes.Int64Type{},
+ "size": basetypes.Int32Type{},
}
}
diff --git a/stackit/internal/services/postgresflexalpha/postgresflex_acc_test.go b/stackit/internal/services/postgresflexalpha/postgresflex_acc_test.go
index dcf879ba..583cd356 100644
--- a/stackit/internal/services/postgresflexalpha/postgresflex_acc_test.go
+++ b/stackit/internal/services/postgresflexalpha/postgresflex_acc_test.go
@@ -17,8 +17,8 @@ import (
"github.com/hashicorp/terraform-plugin-testing/terraform"
"github.com/stackitcloud/stackit-sdk-go/core/config"
"github.com/stackitcloud/stackit-sdk-go/core/utils"
+ "github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
- postgresflexalphaPkgGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
postgresflexalphaInstance "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/instance"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/wait/postgresflexalpha"
@@ -33,7 +33,7 @@ import (
const pfx = "stackitprivatepreview_postgresflexalpha"
func TestInstanceResourceSchema(t *testing.T) {
- t.Parallel()
+ // t.Parallel() // NOTE(review): disabled without explanation — confirm why this test can no longer run in parallel
ctx := context.Background()
schemaRequest := fwresource.SchemaRequest{}
@@ -142,6 +142,7 @@ func TestAccInstance(t *testing.T) {
updSizeData.Size = 25
updBackupSched := updSizeData
+ // The API should complain about more than one daily backup.
updBackupSched.BackupSchedule = "30 3 * * *"
/*
@@ -1139,7 +1140,7 @@ func testAccCheckPostgresFlexDestroy(s *terraform.State) error {
}
ctx := context.Background()
- var client *postgresflexalphaPkgGen.APIClient
+ var client *v3alpha1api.APIClient
var err error
var region, projectID string
@@ -1163,7 +1164,7 @@ func testAccCheckPostgresFlexDestroy(s *terraform.State) error {
config.WithEndpoint(testutils.PostgresFlexCustomEndpoint),
)
}
- client, err = postgresflexalphaPkgGen.NewAPIClient(apiClientConfigOptions...)
+ client, err = v3alpha1api.NewAPIClient(apiClientConfigOptions...)
if err != nil {
log.Fatalln(err)
}
@@ -1180,7 +1181,7 @@ func testAccCheckPostgresFlexDestroy(s *terraform.State) error {
instancesToDestroy = append(instancesToDestroy, instanceID)
}
- instancesResp, err := client.ListInstancesRequest(ctx, projectID, region).
+ instancesResp, err := client.DefaultAPI.ListInstancesRequest(ctx, projectID, region).
Size(100).
Execute()
if err != nil {
@@ -1189,25 +1190,25 @@ func testAccCheckPostgresFlexDestroy(s *terraform.State) error {
items := instancesResp.GetInstances()
for i := range items {
- if items[i].Id == nil {
+ if items[i].Id == "" {
continue
}
- if utils.Contains(instancesToDestroy, *items[i].Id) {
- err := client.DeleteInstanceRequestExecute(ctx, testutils.ProjectId, region, *items[i].Id)
+ if utils.Contains(instancesToDestroy, items[i].Id) {
+ err := client.DefaultAPI.DeleteInstanceRequest(ctx, testutils.ProjectId, region, items[i].Id).Execute()
if err != nil {
- return fmt.Errorf("deleting instance %s during CheckDestroy: %w", *items[i].Id, err)
+ return fmt.Errorf("deleting instance %s during CheckDestroy: %w", items[i].Id, err)
}
err = postgresflexalpha.DeleteInstanceWaitHandler(
ctx,
- client,
+ client.DefaultAPI,
testutils.ProjectId,
testutils.Region,
- *items[i].Id,
+ items[i].Id,
15*time.Minute,
10*time.Second,
)
if err != nil {
- return fmt.Errorf("deleting instance %s during CheckDestroy: waiting for deletion %w", *items[i].Id, err)
+ return fmt.Errorf("deleting instance %s during CheckDestroy: waiting for deletion %w", items[i].Id, err)
}
}
}
diff --git a/stackit/internal/services/postgresflexalpha/user/datasource.go b/stackit/internal/services/postgresflexalpha/user/datasource.go
index 0bb991df..9234d254 100644
--- a/stackit/internal/services/postgresflexalpha/user/datasource.go
+++ b/stackit/internal/services/postgresflexalpha/user/datasource.go
@@ -8,8 +8,8 @@ import (
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
"github.com/hashicorp/terraform-plugin-framework/diag"
+ "github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
- postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
postgresflexalpha "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/user/datasources_gen"
postgresflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/utils"
@@ -40,7 +40,7 @@ type dataSourceModel struct {
// userDataSource is the data source implementation.
type userDataSource struct {
- client *postgresflex.APIClient
+ client *v3alpha1api.APIClient
providerData core.ProviderData
}
@@ -103,7 +103,7 @@ func (r *userDataSource) Read(
projectId := model.ProjectId.ValueString()
instanceId := model.InstanceId.ValueString()
- userId64 := model.UserId.ValueInt64()
+ userId64 := model.UserId.ValueInt32()
if userId64 > math.MaxInt32 {
core.LogAndAddError(ctx, &resp.Diagnostics, "Error in type conversion", "int value too large (userId)")
return
@@ -116,7 +116,7 @@ func (r *userDataSource) Read(
ctx = tflog.SetField(ctx, "region", region)
ctx = tflog.SetField(ctx, "user_id", userId)
- recordSetResp, err := r.client.GetUserRequest(ctx, projectId, region, instanceId, userId).Execute()
+ recordSetResp, err := r.client.DefaultAPI.GetUserRequest(ctx, projectId, region, instanceId, userId).Execute()
if err != nil {
handleReadError(ctx, &diags, err, projectId, instanceId, userId)
resp.State.RemoveResource(ctx)
diff --git a/stackit/internal/services/postgresflexalpha/user/datasources_gen/user_data_source_gen.go b/stackit/internal/services/postgresflexalpha/user/datasources_gen/user_data_source_gen.go
index 29a7cca0..4c4f9337 100644
--- a/stackit/internal/services/postgresflexalpha/user/datasources_gen/user_data_source_gen.go
+++ b/stackit/internal/services/postgresflexalpha/user/datasources_gen/user_data_source_gen.go
@@ -14,7 +14,7 @@ import (
func UserDataSourceSchema(ctx context.Context) schema.Schema {
return schema.Schema{
Attributes: map[string]schema.Attribute{
- "tf_original_api_id": schema.Int64Attribute{
+ "tf_original_api_id": schema.Int32Attribute{
Computed: true,
Description: "The ID of the user.",
MarkdownDescription: "The ID of the user.",
@@ -55,7 +55,7 @@ func UserDataSourceSchema(ctx context.Context) schema.Schema {
Description: "The current status of the user.",
MarkdownDescription: "The current status of the user.",
},
- "user_id": schema.Int64Attribute{
+ "user_id": schema.Int32Attribute{
Required: true,
Description: "The ID of the user.",
MarkdownDescription: "The ID of the user.",
@@ -65,12 +65,12 @@ func UserDataSourceSchema(ctx context.Context) schema.Schema {
}
type UserModel struct {
- Id types.Int64 `tfsdk:"tf_original_api_id"`
+ Id types.Int32 `tfsdk:"tf_original_api_id"`
InstanceId types.String `tfsdk:"instance_id"`
Name types.String `tfsdk:"name"`
ProjectId types.String `tfsdk:"project_id"`
Region types.String `tfsdk:"region"`
Roles types.List `tfsdk:"roles"`
Status types.String `tfsdk:"status"`
- UserId types.Int64 `tfsdk:"user_id"`
+ UserId types.Int32 `tfsdk:"user_id"`
}
diff --git a/stackit/internal/services/postgresflexalpha/user/datasources_gen/users_data_source_gen.go b/stackit/internal/services/postgresflexalpha/user/datasources_gen/users_data_source_gen.go
index bc83be6b..93244cb6 100644
--- a/stackit/internal/services/postgresflexalpha/user/datasources_gen/users_data_source_gen.go
+++ b/stackit/internal/services/postgresflexalpha/user/datasources_gen/users_data_source_gen.go
@@ -25,7 +25,7 @@ func UsersDataSourceSchema(ctx context.Context) schema.Schema {
Description: "The ID of the instance.",
MarkdownDescription: "The ID of the instance.",
},
- "page": schema.Int64Attribute{
+ "page": schema.Int32Attribute{
Optional: true,
Computed: true,
Description: "Number of the page of items list to be returned.",
@@ -33,19 +33,19 @@ func UsersDataSourceSchema(ctx context.Context) schema.Schema {
},
"pagination": schema.SingleNestedAttribute{
Attributes: map[string]schema.Attribute{
- "page": schema.Int64Attribute{
+ "page": schema.Int32Attribute{
Computed: true,
},
- "size": schema.Int64Attribute{
+ "size": schema.Int32Attribute{
Computed: true,
},
"sort": schema.StringAttribute{
Computed: true,
},
- "total_pages": schema.Int64Attribute{
+ "total_pages": schema.Int32Attribute{
Computed: true,
},
- "total_rows": schema.Int64Attribute{
+ "total_rows": schema.Int32Attribute{
Computed: true,
},
},
@@ -71,7 +71,7 @@ func UsersDataSourceSchema(ctx context.Context) schema.Schema {
),
},
},
- "size": schema.Int64Attribute{
+ "size": schema.Int32Attribute{
Optional: true,
Computed: true,
Description: "Number of items to be returned on each page.",
@@ -96,7 +96,7 @@ func UsersDataSourceSchema(ctx context.Context) schema.Schema {
"users": schema.ListNestedAttribute{
NestedObject: schema.NestedAttributeObject{
Attributes: map[string]schema.Attribute{
- "id": schema.Int64Attribute{
+ "id": schema.Int32Attribute{
Computed: true,
Description: "The ID of the user.",
MarkdownDescription: "The ID of the user.",
@@ -128,11 +128,11 @@ func UsersDataSourceSchema(ctx context.Context) schema.Schema {
type UsersModel struct {
InstanceId types.String `tfsdk:"instance_id"`
- Page types.Int64 `tfsdk:"page"`
+ Page types.Int32 `tfsdk:"page"`
Pagination PaginationValue `tfsdk:"pagination"`
ProjectId types.String `tfsdk:"project_id"`
Region types.String `tfsdk:"region"`
- Size types.Int64 `tfsdk:"size"`
+ Size types.Int32 `tfsdk:"size"`
Sort types.String `tfsdk:"sort"`
Users types.List `tfsdk:"users"`
}
@@ -172,12 +172,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
return nil, diags
}
- pageVal, ok := pageAttribute.(basetypes.Int64Value)
+ pageVal, ok := pageAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute))
+ fmt.Sprintf(`page expected to be basetypes.Int32Value, was: %T`, pageAttribute))
}
sizeAttribute, ok := attributes["size"]
@@ -190,12 +190,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
return nil, diags
}
- sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
+ sizeVal, ok := sizeAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
+ fmt.Sprintf(`size expected to be basetypes.Int32Value, was: %T`, sizeAttribute))
}
sortAttribute, ok := attributes["sort"]
@@ -226,12 +226,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
return nil, diags
}
- totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value)
+ totalPagesVal, ok := totalPagesAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute))
+ fmt.Sprintf(`total_pages expected to be basetypes.Int32Value, was: %T`, totalPagesAttribute))
}
totalRowsAttribute, ok := attributes["total_rows"]
@@ -244,12 +244,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
return nil, diags
}
- totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value)
+ totalRowsVal, ok := totalRowsAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute))
+ fmt.Sprintf(`total_rows expected to be basetypes.Int32Value, was: %T`, totalRowsAttribute))
}
if diags.HasError() {
@@ -339,12 +339,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
return NewPaginationValueUnknown(), diags
}
- pageVal, ok := pageAttribute.(basetypes.Int64Value)
+ pageVal, ok := pageAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute))
+ fmt.Sprintf(`page expected to be basetypes.Int32Value, was: %T`, pageAttribute))
}
sizeAttribute, ok := attributes["size"]
@@ -357,12 +357,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
return NewPaginationValueUnknown(), diags
}
- sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
+ sizeVal, ok := sizeAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
+ fmt.Sprintf(`size expected to be basetypes.Int32Value, was: %T`, sizeAttribute))
}
sortAttribute, ok := attributes["sort"]
@@ -393,12 +393,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
return NewPaginationValueUnknown(), diags
}
- totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value)
+ totalPagesVal, ok := totalPagesAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute))
+ fmt.Sprintf(`total_pages expected to be basetypes.Int32Value, was: %T`, totalPagesAttribute))
}
totalRowsAttribute, ok := attributes["total_rows"]
@@ -411,12 +411,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
return NewPaginationValueUnknown(), diags
}
- totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value)
+ totalRowsVal, ok := totalRowsAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute))
+ fmt.Sprintf(`total_rows expected to be basetypes.Int32Value, was: %T`, totalRowsAttribute))
}
if diags.HasError() {
@@ -501,11 +501,11 @@ func (t PaginationType) ValueType(ctx context.Context) attr.Value {
var _ basetypes.ObjectValuable = PaginationValue{}
type PaginationValue struct {
- Page basetypes.Int64Value `tfsdk:"page"`
- Size basetypes.Int64Value `tfsdk:"size"`
+ Page basetypes.Int32Value `tfsdk:"page"`
+ Size basetypes.Int32Value `tfsdk:"size"`
Sort basetypes.StringValue `tfsdk:"sort"`
- TotalPages basetypes.Int64Value `tfsdk:"total_pages"`
- TotalRows basetypes.Int64Value `tfsdk:"total_rows"`
+ TotalPages basetypes.Int32Value `tfsdk:"total_pages"`
+ TotalRows basetypes.Int32Value `tfsdk:"total_rows"`
state attr.ValueState
}
@@ -515,11 +515,11 @@ func (v PaginationValue) ToTerraformValue(ctx context.Context) (tftypes.Value, e
var val tftypes.Value
var err error
- attrTypes["page"] = basetypes.Int64Type{}.TerraformType(ctx)
- attrTypes["size"] = basetypes.Int64Type{}.TerraformType(ctx)
+ attrTypes["page"] = basetypes.Int32Type{}.TerraformType(ctx)
+ attrTypes["size"] = basetypes.Int32Type{}.TerraformType(ctx)
attrTypes["sort"] = basetypes.StringType{}.TerraformType(ctx)
- attrTypes["total_pages"] = basetypes.Int64Type{}.TerraformType(ctx)
- attrTypes["total_rows"] = basetypes.Int64Type{}.TerraformType(ctx)
+ attrTypes["total_pages"] = basetypes.Int32Type{}.TerraformType(ctx)
+ attrTypes["total_rows"] = basetypes.Int32Type{}.TerraformType(ctx)
objectType := tftypes.Object{AttributeTypes: attrTypes}
@@ -597,11 +597,11 @@ func (v PaginationValue) ToObjectValue(ctx context.Context) (basetypes.ObjectVal
var diags diag.Diagnostics
attributeTypes := map[string]attr.Type{
- "page": basetypes.Int64Type{},
- "size": basetypes.Int64Type{},
+ "page": basetypes.Int32Type{},
+ "size": basetypes.Int32Type{},
"sort": basetypes.StringType{},
- "total_pages": basetypes.Int64Type{},
- "total_rows": basetypes.Int64Type{},
+ "total_pages": basetypes.Int32Type{},
+ "total_rows": basetypes.Int32Type{},
}
if v.IsNull() {
@@ -673,11 +673,11 @@ func (v PaginationValue) Type(ctx context.Context) attr.Type {
func (v PaginationValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
return map[string]attr.Type{
- "page": basetypes.Int64Type{},
- "size": basetypes.Int64Type{},
+ "page": basetypes.Int32Type{},
+ "size": basetypes.Int32Type{},
"sort": basetypes.StringType{},
- "total_pages": basetypes.Int64Type{},
- "total_rows": basetypes.Int64Type{},
+ "total_pages": basetypes.Int32Type{},
+ "total_rows": basetypes.Int32Type{},
}
}
@@ -716,12 +716,12 @@ func (t UsersType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue
return nil, diags
}
- idVal, ok := idAttribute.(basetypes.Int64Value)
+ idVal, ok := idAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`id expected to be basetypes.Int64Value, was: %T`, idAttribute))
+ fmt.Sprintf(`id expected to be basetypes.Int32Value, was: %T`, idAttribute))
}
nameAttribute, ok := attributes["name"]
@@ -845,12 +845,12 @@ func NewUsersValue(attributeTypes map[string]attr.Type, attributes map[string]at
return NewUsersValueUnknown(), diags
}
- idVal, ok := idAttribute.(basetypes.Int64Value)
+ idVal, ok := idAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`id expected to be basetypes.Int64Value, was: %T`, idAttribute))
+ fmt.Sprintf(`id expected to be basetypes.Int32Value, was: %T`, idAttribute))
}
nameAttribute, ok := attributes["name"]
@@ -969,7 +969,7 @@ func (t UsersType) ValueType(ctx context.Context) attr.Value {
var _ basetypes.ObjectValuable = UsersValue{}
type UsersValue struct {
- Id basetypes.Int64Value `tfsdk:"id"`
+ Id basetypes.Int32Value `tfsdk:"id"`
Name basetypes.StringValue `tfsdk:"name"`
Status basetypes.StringValue `tfsdk:"status"`
state attr.ValueState
@@ -981,7 +981,7 @@ func (v UsersValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error)
var val tftypes.Value
var err error
- attrTypes["id"] = basetypes.Int64Type{}.TerraformType(ctx)
+ attrTypes["id"] = basetypes.Int32Type{}.TerraformType(ctx)
attrTypes["name"] = basetypes.StringType{}.TerraformType(ctx)
attrTypes["status"] = basetypes.StringType{}.TerraformType(ctx)
@@ -1045,7 +1045,7 @@ func (v UsersValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, d
var diags diag.Diagnostics
attributeTypes := map[string]attr.Type{
- "id": basetypes.Int64Type{},
+ "id": basetypes.Int32Type{},
"name": basetypes.StringType{},
"status": basetypes.StringType{},
}
@@ -1109,7 +1109,7 @@ func (v UsersValue) Type(ctx context.Context) attr.Type {
func (v UsersValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
return map[string]attr.Type{
- "id": basetypes.Int64Type{},
+ "id": basetypes.Int32Type{},
"name": basetypes.StringType{},
"status": basetypes.StringType{},
}
diff --git a/stackit/internal/services/postgresflexalpha/user/mapper.go b/stackit/internal/services/postgresflexalpha/user/mapper.go
index 952235ca..36d3f388 100644
--- a/stackit/internal/services/postgresflexalpha/user/mapper.go
+++ b/stackit/internal/services/postgresflexalpha/user/mapper.go
@@ -2,18 +2,17 @@ package postgresflexalpha
import (
"fmt"
- "strconv"
"github.com/hashicorp/terraform-plugin-framework/attr"
"github.com/hashicorp/terraform-plugin-framework/types"
+ "github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
- postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
)
// mapDataSourceFields maps API response to data source model, preserving existing ID.
-func mapDataSourceFields(userResp *postgresflex.GetUserResponse, model *dataSourceModel, region string) error {
+func mapDataSourceFields(userResp *v3alpha1api.GetUserResponse, model *dataSourceModel, region string) error {
if userResp == nil {
return fmt.Errorf("response is nil")
}
@@ -22,27 +21,25 @@ func mapDataSourceFields(userResp *postgresflex.GetUserResponse, model *dataSour
}
user := userResp
- var userId int64
- if model.UserId.ValueInt64() != 0 {
- userId = model.UserId.ValueInt64()
- } else if user.Id != nil {
- userId = *user.Id
+ var userId int32
+ if model.UserId.ValueInt32() != 0 {
+ userId = model.UserId.ValueInt32()
} else {
return fmt.Errorf("user id not present")
}
model.TerraformID = utils.BuildInternalTerraformId(
- model.ProjectId.ValueString(), region, model.InstanceId.ValueString(), strconv.FormatInt(userId, 10),
+ model.ProjectId.ValueString(), region, model.InstanceId.ValueString(), fmt.Sprint(userId),
)
- model.UserId = types.Int64Value(userId)
+ model.UserId = types.Int32Value(userId)
model.Name = types.StringValue(user.GetName())
if user.Roles == nil {
model.Roles = types.List(types.SetNull(types.StringType))
} else {
var roles []attr.Value
- for _, role := range *user.Roles {
+ for _, role := range user.Roles {
roles = append(roles, types.StringValue(string(role)))
}
rolesSet, diags := types.SetValue(types.StringType, roles)
@@ -52,24 +49,24 @@ func mapDataSourceFields(userResp *postgresflex.GetUserResponse, model *dataSour
model.Roles = types.List(rolesSet)
}
- model.Id = types.Int64Value(userId)
+ model.Id = types.Int32Value(userId)
model.Region = types.StringValue(region)
model.Status = types.StringValue(user.GetStatus())
return nil
}
// toPayloadRoles converts a string slice to the API's role type.
-func toPayloadRoles(roles *[]string) *[]postgresflex.UserRole {
- var userRoles = make([]postgresflex.UserRole, 0, len(*roles))
- for _, role := range *roles {
- userRoles = append(userRoles, postgresflex.UserRole(role))
+func toPayloadRoles(roles []string) []v3alpha1api.UserRole {
+ var userRoles = make([]v3alpha1api.UserRole, 0, len(roles))
+ for _, role := range roles {
+ userRoles = append(userRoles, v3alpha1api.UserRole(role))
}
- return &userRoles
+ return userRoles
}
// toUpdatePayload creates an API update payload from the resource model.
-func toUpdatePayload(model *resourceModel, roles *[]string) (
- *postgresflex.UpdateUserRequestPayload,
+func toUpdatePayload(model *resourceModel, roles []string) (
+ *v3alpha1api.UpdateUserRequestPayload,
error,
) {
if model == nil {
@@ -79,14 +76,14 @@ func toUpdatePayload(model *resourceModel, roles *[]string) (
return nil, fmt.Errorf("nil roles")
}
- return &postgresflex.UpdateUserRequestPayload{
+ return &v3alpha1api.UpdateUserRequestPayload{
Name: model.Name.ValueStringPointer(),
Roles: toPayloadRoles(roles),
}, nil
}
// toCreatePayload creates an API create payload from the resource model.
-func toCreatePayload(model *resourceModel, roles *[]string) (*postgresflex.CreateUserRequestPayload, error) {
+func toCreatePayload(model *resourceModel, roles []string) (*v3alpha1api.CreateUserRequestPayload, error) {
if model == nil {
return nil, fmt.Errorf("nil model")
}
@@ -94,14 +91,14 @@ func toCreatePayload(model *resourceModel, roles *[]string) (*postgresflex.Creat
return nil, fmt.Errorf("nil roles")
}
- return &postgresflex.CreateUserRequestPayload{
+ return &v3alpha1api.CreateUserRequestPayload{
Roles: toPayloadRoles(roles),
- Name: model.Name.ValueStringPointer(),
+ Name: model.Name.ValueString(),
}, nil
}
// mapResourceFields maps API response to the resource model, preserving existing ID.
-func mapResourceFields(userResp *postgresflex.GetUserResponse, model *resourceModel, region string) error {
+func mapResourceFields(userResp *v3alpha1api.GetUserResponse, model *resourceModel, region string) error {
if userResp == nil {
return fmt.Errorf("response is nil")
}
@@ -110,24 +107,24 @@ func mapResourceFields(userResp *postgresflex.GetUserResponse, model *resourceMo
}
user := userResp
- var userId int64
- if !model.UserId.IsNull() && !model.UserId.IsUnknown() && model.UserId.ValueInt64() != 0 {
- userId = model.UserId.ValueInt64()
- } else if user.Id != nil {
- userId = *user.Id
+ var userId int32
+ if !model.UserId.IsNull() && !model.UserId.IsUnknown() && model.UserId.ValueInt32() != 0 {
+ userId = model.UserId.ValueInt32()
+ } else if user.Id != 0 {
+ userId = user.Id
} else {
return fmt.Errorf("user id not present")
}
- model.Id = types.Int64Value(userId)
- model.UserId = types.Int64Value(userId)
- model.Name = types.StringPointerValue(user.Name)
+ model.Id = types.Int32Value(userId)
+ model.UserId = types.Int32Value(userId)
+ model.Name = types.StringValue(user.Name)
if user.Roles == nil {
model.Roles = types.List(types.SetNull(types.StringType))
} else {
var roles []attr.Value
- for _, role := range *user.Roles {
+ for _, role := range user.Roles {
roles = append(roles, types.StringValue(string(role)))
}
rolesSet, diags := types.SetValue(types.StringType, roles)
@@ -137,6 +134,6 @@ func mapResourceFields(userResp *postgresflex.GetUserResponse, model *resourceMo
model.Roles = types.List(rolesSet)
}
model.Region = types.StringValue(region)
- model.Status = types.StringPointerValue(user.Status)
+ model.Status = types.StringValue(user.Status)
return nil
}
diff --git a/stackit/internal/services/postgresflexalpha/user/mapper_test.go b/stackit/internal/services/postgresflexalpha/user/mapper_test.go
index 5014d4ac..b0be6d91 100644
--- a/stackit/internal/services/postgresflexalpha/user/mapper_test.go
+++ b/stackit/internal/services/postgresflexalpha/user/mapper_test.go
@@ -8,7 +8,8 @@ import (
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/stackitcloud/stackit-sdk-go/core/utils"
- postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
+ "github.com/stackitcloud/stackit-sdk-go/services/postgresflex"
+
data "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/user/datasources_gen"
)
@@ -27,8 +28,8 @@ func TestMapDataSourceFields(t *testing.T) {
testRegion,
dataSourceModel{
UserModel: data.UserModel{
- Id: types.Int64Value(1),
- UserId: types.Int64Value(1),
+ Id: types.Int32Value(1),
+ UserId: types.Int32Value(1),
InstanceId: types.StringValue("iid"),
ProjectId: types.StringValue("pid"),
Name: types.StringValue(""),
@@ -53,8 +54,8 @@ func TestMapDataSourceFields(t *testing.T) {
testRegion,
dataSourceModel{
UserModel: data.UserModel{
- Id: types.Int64Value(1),
- UserId: types.Int64Value(1),
+ Id: types.Int32Value(1),
+ UserId: types.Int32Value(1),
InstanceId: types.StringValue("iid"),
ProjectId: types.StringValue("pid"),
Name: types.StringValue("username"),
@@ -77,7 +78,7 @@ func TestMapDataSourceFields(t *testing.T) {
{
"null_fields_and_int_conversions",
&postgresflex.GetUserResponse{
- Id: utils.Ptr(int64(1)),
+ Id: utils.Ptr(int32(1)),
Roles: &[]postgresflex.UserRole{},
Name: nil,
Status: utils.Ptr("status"),
@@ -85,8 +86,8 @@ func TestMapDataSourceFields(t *testing.T) {
testRegion,
dataSourceModel{
UserModel: data.UserModel{
- Id: types.Int64Value(1),
- UserId: types.Int64Value(1),
+ Id: types.Int32Value(1),
+ UserId: types.Int32Value(1),
InstanceId: types.StringValue("iid"),
ProjectId: types.StringValue("pid"),
Name: types.StringValue(""),
@@ -160,12 +161,12 @@ func TestMapFieldsCreate(t *testing.T) {
{
"default_values",
&postgresflex.GetUserResponse{
- Id: utils.Ptr(int64(1)),
+ Id: utils.Ptr(int32(1)),
},
testRegion,
resourceModel{
- Id: types.Int64Value(1),
- UserId: types.Int64Value(1),
+ Id: types.Int32Value(1),
+ UserId: types.Int32Value(1),
InstanceId: types.StringValue("iid"),
ProjectId: types.StringValue("pid"),
Name: types.StringNull(),
@@ -180,14 +181,14 @@ func TestMapFieldsCreate(t *testing.T) {
{
"simple_values",
&postgresflex.GetUserResponse{
- Id: utils.Ptr(int64(1)),
+ Id: utils.Ptr(int32(1)),
Name: utils.Ptr("username"),
Status: utils.Ptr("status"),
},
testRegion,
resourceModel{
- Id: types.Int64Value(1),
- UserId: types.Int64Value(1),
+ Id: types.Int32Value(1),
+ UserId: types.Int32Value(1),
InstanceId: types.StringValue("iid"),
ProjectId: types.StringValue("pid"),
Name: types.StringValue("username"),
@@ -202,14 +203,14 @@ func TestMapFieldsCreate(t *testing.T) {
{
"null_fields_and_int_conversions",
&postgresflex.GetUserResponse{
- Id: utils.Ptr(int64(1)),
+ Id: utils.Ptr(int32(1)),
Name: nil,
Status: nil,
},
testRegion,
resourceModel{
- Id: types.Int64Value(1),
- UserId: types.Int64Value(1),
+ Id: types.Int32Value(1),
+ UserId: types.Int32Value(1),
InstanceId: types.StringValue("iid"),
ProjectId: types.StringValue("pid"),
Name: types.StringNull(),
@@ -281,12 +282,12 @@ func TestMapFields(t *testing.T) {
{
"default_values",
&postgresflex.GetUserResponse{
- Id: utils.Ptr(int64(1)),
+ Id: utils.Ptr(int32(1)),
},
testRegion,
resourceModel{
- Id: types.Int64Value(1),
- UserId: types.Int64Value(int64(1)),
+ Id: types.Int32Value(1),
+ UserId: types.Int32Value(int32(1)),
InstanceId: types.StringValue("iid"),
ProjectId: types.StringValue("pid"),
Name: types.StringNull(),
@@ -300,7 +301,7 @@ func TestMapFields(t *testing.T) {
{
"simple_values",
&postgresflex.GetUserResponse{
- Id: utils.Ptr(int64(1)),
+ Id: utils.Ptr(int32(1)),
Roles: &[]postgresflex.UserRole{
"role_1",
"role_2",
@@ -310,8 +311,8 @@ func TestMapFields(t *testing.T) {
},
testRegion,
resourceModel{
- Id: types.Int64Value(1),
- UserId: types.Int64Value(1),
+ Id: types.Int32Value(1),
+ UserId: types.Int32Value(1),
InstanceId: types.StringValue("iid"),
ProjectId: types.StringValue("pid"),
Name: types.StringValue("username"),
@@ -333,13 +334,13 @@ func TestMapFields(t *testing.T) {
{
"null_fields_and_int_conversions",
&postgresflex.GetUserResponse{
- Id: utils.Ptr(int64(1)),
+ Id: utils.Ptr(int32(1)),
Name: nil,
},
testRegion,
resourceModel{
- Id: types.Int64Value(1),
- UserId: types.Int64Value(1),
+ Id: types.Int32Value(1),
+ UserId: types.Int32Value(1),
InstanceId: types.StringValue("iid"),
ProjectId: types.StringValue("pid"),
Name: types.StringNull(),
diff --git a/stackit/internal/services/postgresflexalpha/user/resource.go b/stackit/internal/services/postgresflexalpha/user/resource.go
index ab7ec563..b45d76bc 100644
--- a/stackit/internal/services/postgresflexalpha/user/resource.go
+++ b/stackit/internal/services/postgresflexalpha/user/resource.go
@@ -12,8 +12,8 @@ import (
"github.com/hashicorp/terraform-plugin-framework/diag"
"github.com/hashicorp/terraform-plugin-framework/resource/identityschema"
+ "github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
- postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
postgresflexalpha "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/user/resources_gen"
postgresflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/utils"
postgresflexalphaWait "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/wait/postgresflexalpha"
@@ -55,12 +55,12 @@ type UserResourceIdentityModel struct {
ProjectID types.String `tfsdk:"project_id"`
Region types.String `tfsdk:"region"`
InstanceID types.String `tfsdk:"instance_id"`
- UserID types.Int64 `tfsdk:"user_id"`
+ UserID types.Int32 `tfsdk:"user_id"`
}
// userResource implements the resource handling for a PostgreSQL Flex user.
type userResource struct {
- client *postgresflex.APIClient
+ client *v3alpha1api.APIClient
providerData core.ProviderData
}
@@ -202,14 +202,14 @@ func (r *userResource) Create(
}
// Generate API request body from model
- payload, err := toCreatePayload(&model, &roles)
+ payload, err := toCreatePayload(&model, roles)
if err != nil {
core.LogAndAddError(ctx, &resp.Diagnostics, "Error creating user", fmt.Sprintf("Creating API payload: %v", err))
return
}
// Create new user
- userResp, err := r.client.CreateUserRequest(
+ userResp, err := r.client.DefaultAPI.CreateUserRequest(
ctx,
arg.projectId,
arg.region,
@@ -221,7 +221,7 @@ func (r *userResource) Create(
}
id, ok := userResp.GetIdOk()
- if !ok || id == 0 {
+ if !ok || *id == 0 {
core.LogAndAddError(
ctx,
&resp.Diagnostics,
@@ -230,7 +230,7 @@ func (r *userResource) Create(
)
return
}
- arg.userId = id
+ arg.userId = *id
ctx = tflog.SetField(ctx, "user_id", id)
@@ -241,25 +241,25 @@ func (r *userResource) Create(
ProjectID: types.StringValue(arg.projectId),
Region: types.StringValue(arg.region),
InstanceID: types.StringValue(arg.instanceId),
- UserID: types.Int64Value(id),
+ UserID: types.Int32Value(*id),
}
resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
if resp.Diagnostics.HasError() {
return
}
- model.Id = types.Int64Value(id)
- model.UserId = types.Int64Value(id)
+ model.Id = types.Int32Value(*id)
+ model.UserId = types.Int32Value(*id)
model.Password = types.StringValue(userResp.GetPassword())
model.Status = types.StringValue(userResp.GetStatus())
waitResp, err := postgresflexalphaWait.GetUserByIdWaitHandler(
ctx,
- r.client,
+ r.client.DefaultAPI,
arg.projectId,
arg.instanceId,
arg.region,
- id,
+ *id,
).SetSleepBeforeWait(
10 * time.Second,
).SetTimeout(
@@ -276,7 +276,7 @@ func (r *userResource) Create(
return
}
- if waitResp.Id == nil {
+ if waitResp.Id == 0 {
core.LogAndAddError(
ctx,
&resp.Diagnostics,
@@ -285,7 +285,7 @@ func (r *userResource) Create(
)
return
}
- if waitResp.Id == nil || *waitResp.Id != id {
+ if waitResp.Id != *id {
core.LogAndAddError(
ctx,
&resp.Diagnostics,
@@ -336,11 +336,11 @@ func (r *userResource) Read(
// Read resource state
waitResp, err := postgresflexalphaWait.GetUserByIdWaitHandler(
ctx,
- r.client,
+ r.client.DefaultAPI,
arg.projectId,
arg.instanceId,
arg.region,
- model.UserId.ValueInt64(),
+ model.UserId.ValueInt32(),
).SetSleepBeforeWait(
10 * time.Second,
).SetTimeout(
@@ -357,7 +357,7 @@ func (r *userResource) Read(
return
}
- if waitResp.Id == nil || *waitResp.Id != model.UserId.ValueInt64() {
+ if waitResp.Id != model.UserId.ValueInt32() {
core.LogAndAddError(
ctx,
&resp.Diagnostics,
@@ -366,7 +366,7 @@ func (r *userResource) Read(
)
return
}
- arg.userId = *waitResp.Id
+ arg.userId = waitResp.Id
ctx = core.LogResponse(ctx)
@@ -375,7 +375,7 @@ func (r *userResource) Read(
ProjectID: types.StringValue(arg.projectId),
Region: types.StringValue(arg.region),
InstanceID: types.StringValue(arg.instanceId),
- UserID: types.Int64Value(arg.userId),
+ UserID: types.Int32Value(arg.userId),
}
resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
if resp.Diagnostics.HasError() {
@@ -429,7 +429,7 @@ func (r *userResource) Update(
}
// Generate API request body from model
- payload, err := toUpdatePayload(&model, &roles)
+ payload, err := toUpdatePayload(&model, roles)
if err != nil {
core.LogAndAddError(ctx, &resp.Diagnostics, "Error updating user", fmt.Sprintf("Updating API payload: %v", err))
return
@@ -443,7 +443,7 @@ func (r *userResource) Update(
userId := int32(userId64) // nolint:gosec // check is performed above
// Update existing instance
- err = r.client.UpdateUserRequest(
+ err = r.client.DefaultAPI.UpdateUserRequest(
ctx,
arg.projectId,
arg.region,
@@ -462,7 +462,7 @@ func (r *userResource) Update(
ProjectID: types.StringValue(arg.projectId),
Region: types.StringValue(arg.region),
InstanceID: types.StringValue(arg.instanceId),
- UserID: types.Int64Value(userId64),
+ UserID: types.Int32Value(userId),
}
resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
if resp.Diagnostics.HasError() {
@@ -472,11 +472,11 @@ func (r *userResource) Update(
// Verify update
waitResp, err := postgresflexalphaWait.GetUserByIdWaitHandler(
ctx,
- r.client,
+ r.client.DefaultAPI,
arg.projectId,
arg.instanceId,
arg.region,
- model.UserId.ValueInt64(),
+ model.UserId.ValueInt32(),
).SetSleepBeforeWait(
10 * time.Second,
).SetTimeout(
@@ -493,7 +493,7 @@ func (r *userResource) Update(
return
}
- if waitResp.Id == nil || *waitResp.Id != model.UserId.ValueInt64() {
+ if waitResp.Id != model.UserId.ValueInt32() {
core.LogAndAddError(
ctx,
&resp.Diagnostics,
@@ -502,7 +502,7 @@ func (r *userResource) Update(
)
return
}
- arg.userId = *waitResp.Id
+ arg.userId = waitResp.Id
// Set state to fully populated data
diags = resp.State.Set(ctx, stateModel)
@@ -555,7 +555,7 @@ func (r *userResource) Delete(
userId := int32(userId64) // nolint:gosec // check is performed above
// Delete existing record set
- err := r.client.DeleteUserRequest(ctx, arg.projectId, arg.region, arg.instanceId, userId).Execute()
+ err := r.client.DefaultAPI.DeleteUserRequest(ctx, arg.projectId, arg.region, arg.instanceId, userId).Execute()
if err != nil {
core.LogAndAddError(ctx, &resp.Diagnostics, "Error deleting user", fmt.Sprintf("Calling API: %v", err))
}
@@ -571,7 +571,7 @@ func (r *userResource) Delete(
// if exists {
// core.LogAndAddError(
// ctx, &resp.Diagnostics, "Error deleting user",
- // fmt.Sprintf("User ID '%v' resource still exists after deletion", model.UserId.ValueInt64()),
+ // fmt.Sprintf("User ID '%v' resource still exists after deletion", model.UserId.ValueInt32()),
// )
// return
//}
@@ -598,7 +598,7 @@ func (r *userResource) IdentitySchema(
"instance_id": identityschema.StringAttribute{
RequiredForImport: true,
},
- "user_id": identityschema.Int64Attribute{
+ "user_id": identityschema.Int32Attribute{
RequiredForImport: true,
},
},
@@ -610,7 +610,7 @@ type clientArg struct {
projectId string
instanceId string
region string
- userId int64
+ userId int32
}
// ImportState imports a resource into the Terraform state on success.
@@ -668,7 +668,7 @@ func (r *userResource) ImportState(
projectId := identityData.ProjectID.ValueString()
region := identityData.Region.ValueString()
instanceId := identityData.InstanceID.ValueString()
- userId := identityData.UserID.ValueInt64()
+ userId := identityData.UserID.ValueInt32()
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), projectId)...)
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), region)...)
@@ -684,15 +684,15 @@ func (r *userResource) extractIdentityData(
identity UserResourceIdentityModel,
) (*clientArg, error) {
var projectId, region, instanceId string
- var userId int64
+ var userId int32
if !model.UserId.IsNull() && !model.UserId.IsUnknown() {
- userId = model.UserId.ValueInt64()
+ userId = model.UserId.ValueInt32()
} else {
if identity.UserID.IsNull() || identity.UserID.IsUnknown() {
return nil, fmt.Errorf("user_id not found in config")
}
- userId = identity.UserID.ValueInt64()
+ userId = identity.UserID.ValueInt32()
}
if !model.ProjectId.IsNull() && !model.ProjectId.IsUnknown() {
diff --git a/stackit/internal/services/postgresflexalpha/user/resources_gen/user_resource_gen.go b/stackit/internal/services/postgresflexalpha/user/resources_gen/user_resource_gen.go
index f96d8d93..c513629b 100644
--- a/stackit/internal/services/postgresflexalpha/user/resources_gen/user_resource_gen.go
+++ b/stackit/internal/services/postgresflexalpha/user/resources_gen/user_resource_gen.go
@@ -14,7 +14,7 @@ import (
func UserResourceSchema(ctx context.Context) schema.Schema {
return schema.Schema{
Attributes: map[string]schema.Attribute{
- "id": schema.Int64Attribute{
+ "id": schema.Int32Attribute{
Computed: true,
Description: "The ID of the user.",
MarkdownDescription: "The ID of the user.",
@@ -64,7 +64,7 @@ func UserResourceSchema(ctx context.Context) schema.Schema {
Description: "The current status of the user.",
MarkdownDescription: "The current status of the user.",
},
- "user_id": schema.Int64Attribute{
+ "user_id": schema.Int32Attribute{
Optional: true,
Computed: true,
Description: "The ID of the user.",
@@ -75,7 +75,7 @@ func UserResourceSchema(ctx context.Context) schema.Schema {
}
type UserModel struct {
- Id types.Int64 `tfsdk:"id"`
+ Id types.Int32 `tfsdk:"id"`
InstanceId types.String `tfsdk:"instance_id"`
Name types.String `tfsdk:"name"`
Password types.String `tfsdk:"password"`
@@ -83,5 +83,5 @@ type UserModel struct {
Region types.String `tfsdk:"region"`
Roles types.List `tfsdk:"roles"`
Status types.String `tfsdk:"status"`
- UserId types.Int64 `tfsdk:"user_id"`
+ UserId types.Int32 `tfsdk:"user_id"`
}
diff --git a/stackit/internal/services/postgresflexalpha/utils/util.go b/stackit/internal/services/postgresflexalpha/utils/util.go
index 2b6d1de8..35047574 100644
--- a/stackit/internal/services/postgresflexalpha/utils/util.go
+++ b/stackit/internal/services/postgresflexalpha/utils/util.go
@@ -9,7 +9,7 @@ import (
"github.com/hashicorp/terraform-plugin-framework/diag"
"github.com/stackitcloud/stackit-sdk-go/core/config"
- postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
+ postgresflex "github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
diff --git a/stackit/internal/services/postgresflexalpha/utils/util_test.go b/stackit/internal/services/postgresflexalpha/utils/util_test.go
index e0f7a829..3e25e075 100644
--- a/stackit/internal/services/postgresflexalpha/utils/util_test.go
+++ b/stackit/internal/services/postgresflexalpha/utils/util_test.go
@@ -15,7 +15,7 @@ import (
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
- postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
+ "github.com/stackitcloud/stackit-sdk-go/services/postgresflex"
)
const (
diff --git a/stackit/internal/services/sqlserverflexalpha/database/datasource.go b/stackit/internal/services/sqlserverflexalpha/database/datasource.go
deleted file mode 100644
index 5155b41c..00000000
--- a/stackit/internal/services/sqlserverflexalpha/database/datasource.go
+++ /dev/null
@@ -1,174 +0,0 @@
-package sqlserverflexalpha
-
-import (
- "context"
- "fmt"
- "net/http"
-
- "github.com/hashicorp/terraform-plugin-framework/datasource"
- "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/diag"
- "github.com/hashicorp/terraform-plugin-framework/types"
- "github.com/hashicorp/terraform-plugin-log/tflog"
- "github.com/stackitcloud/stackit-sdk-go/core/config"
-
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
-
- sqlserverflexalphaPkg "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha"
- sqlserverflexalphaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/database/datasources_gen"
-)
-
-var _ datasource.DataSource = (*databaseDataSource)(nil)
-
-const errorPrefix = "[sqlserverflexalpha - Database]"
-
-func NewDatabaseDataSource() datasource.DataSource {
- return &databaseDataSource{}
-}
-
-type dataSourceModel struct {
- sqlserverflexalphaGen.DatabaseModel
- TerraformId types.String `tfsdk:"id"`
-}
-
-type databaseDataSource struct {
- client *sqlserverflexalphaPkg.APIClient
- providerData core.ProviderData
-}
-
-func (d *databaseDataSource) Metadata(
- _ context.Context,
- req datasource.MetadataRequest,
- resp *datasource.MetadataResponse,
-) {
- resp.TypeName = req.ProviderTypeName + "_sqlserverflexalpha_database"
-}
-
-func (d *databaseDataSource) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) {
- resp.Schema = sqlserverflexalphaGen.DatabaseDataSourceSchema(ctx)
- resp.Schema.Attributes["id"] = schema.StringAttribute{
- Computed: true,
- Description: "The terraform internal identifier.",
- MarkdownDescription: "The terraform internal identifier.",
- }
-}
-
-// Configure adds the provider configured client to the data source.
-func (d *databaseDataSource) Configure(
- ctx context.Context,
- req datasource.ConfigureRequest,
- resp *datasource.ConfigureResponse,
-) {
- var ok bool
- d.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics)
- if !ok {
- return
- }
-
- apiClientConfigOptions := []config.ConfigurationOption{
- config.WithCustomAuth(d.providerData.RoundTripper),
- utils.UserAgentConfigOption(d.providerData.Version),
- }
- if d.providerData.SQLServerFlexCustomEndpoint != "" {
- apiClientConfigOptions = append(
- apiClientConfigOptions,
- config.WithEndpoint(d.providerData.SQLServerFlexCustomEndpoint),
- )
- } else {
- apiClientConfigOptions = append(
- apiClientConfigOptions,
- config.WithRegion(d.providerData.GetRegion()),
- )
- }
- apiClient, err := sqlserverflexalphaPkg.NewAPIClient(apiClientConfigOptions...)
- if err != nil {
- resp.Diagnostics.AddError(
- "Error configuring API client",
- fmt.Sprintf(
- "Configuring client: %v. This is an error related to the provider configuration, not to the resource configuration",
- err,
- ),
- )
- return
- }
- d.client = apiClient
- tflog.Info(ctx, fmt.Sprintf("%s client configured", errorPrefix))
-}
-
-func (d *databaseDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
- var data dataSourceModel
- // Read Terraform configuration data into the model
- resp.Diagnostics.Append(req.Config.Get(ctx, &data)...)
-
- if resp.Diagnostics.HasError() {
- return
- }
-
- ctx = core.InitProviderContext(ctx)
-
- // Extract identifiers from the plan
- projectId := data.ProjectId.ValueString()
- region := d.providerData.GetRegionWithOverride(data.Region)
- instanceId := data.InstanceId.ValueString()
-
- ctx = tflog.SetField(ctx, "project_id", projectId)
- ctx = tflog.SetField(ctx, "region", region)
- ctx = tflog.SetField(ctx, "instance_id", instanceId)
-
- databaseName := data.DatabaseName.ValueString()
-
- databaseResp, err := d.client.GetDatabaseRequest(ctx, projectId, region, instanceId, databaseName).Execute()
- if err != nil {
- handleReadError(ctx, &resp.Diagnostics, err, projectId, instanceId)
- resp.State.RemoveResource(ctx)
- return
- }
-
- ctx = core.LogResponse(ctx)
- // Map response body to schema and populate Computed attribute values
- err = mapFields(databaseResp, &data, region)
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "Error reading database",
- fmt.Sprintf("Processing API payload: %v", err),
- )
- return
- }
-
- // Save data into Terraform state
- resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
-
- tflog.Info(ctx, "SQL Server Flex beta database read")
-}
-
-// handleReadError centralizes API error handling for the Read operation.
-func handleReadError(ctx context.Context, diags *diag.Diagnostics, err error, projectId, instanceId string) {
- utils.LogError(
- ctx,
- diags,
- err,
- "Reading database",
- fmt.Sprintf(
- "Could not retrieve database for instance %q in project %q.",
- instanceId,
- projectId,
- ),
- map[int]string{
- http.StatusBadRequest: fmt.Sprintf(
- "Invalid request parameters for project %q and instance %q.",
- projectId,
- instanceId,
- ),
- http.StatusNotFound: fmt.Sprintf(
- "Database, instance %q, or project %q not found.",
- instanceId,
- projectId,
- ),
- http.StatusForbidden: fmt.Sprintf("Forbidden access to project %q.", projectId),
- },
- )
-}
diff --git a/stackit/internal/services/sqlserverflexalpha/database/mapper.go b/stackit/internal/services/sqlserverflexalpha/database/mapper.go
deleted file mode 100644
index 55d0e5ae..00000000
--- a/stackit/internal/services/sqlserverflexalpha/database/mapper.go
+++ /dev/null
@@ -1,104 +0,0 @@
-package sqlserverflexalpha
-
-import (
- "fmt"
- "strings"
-
- "github.com/hashicorp/terraform-plugin-framework/types"
-
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
-)
-
-// mapFields maps fields from a ListDatabase API response to a resourceModel for the data source.
-func mapFields(source *sqlserverflexalpha.GetDatabaseResponse, model *dataSourceModel, region string) error {
- if source == nil {
- return fmt.Errorf("response is nil")
- }
- if source.Id == nil || *source.Id == 0 {
- return fmt.Errorf("id not present")
- }
- if model == nil {
- return fmt.Errorf("model given is nil")
- }
-
- var databaseId int64
- if model.Id.ValueInt64() != 0 {
- databaseId = model.Id.ValueInt64()
- } else if source.Id != nil {
- databaseId = *source.Id
- } else {
- return fmt.Errorf("database id not present")
- }
-
- model.Id = types.Int64Value(databaseId)
- model.DatabaseName = types.StringValue(source.GetName())
- model.Name = types.StringValue(source.GetName())
- model.Owner = types.StringValue(strings.Trim(source.GetOwner(), "\""))
- model.Region = types.StringValue(region)
- model.ProjectId = types.StringValue(model.ProjectId.ValueString())
- model.InstanceId = types.StringValue(model.InstanceId.ValueString())
- model.CompatibilityLevel = types.Int64Value(source.GetCompatibilityLevel())
- model.CollationName = types.StringValue(source.GetCollationName())
-
- model.TerraformId = utils.BuildInternalTerraformId(
- model.ProjectId.ValueString(),
- region,
- model.InstanceId.ValueString(),
- model.DatabaseName.ValueString(),
- )
-
- return nil
-}
-
-// mapResourceFields maps fields from a ListDatabase API response to a resourceModel for the resource.
-func mapResourceFields(source *sqlserverflexalpha.GetDatabaseResponse, model *resourceModel, region string) error {
- if source == nil {
- return fmt.Errorf("response is nil")
- }
- if source.Id == nil || *source.Id == 0 {
- return fmt.Errorf("id not present")
- }
- if model == nil {
- return fmt.Errorf("model input is nil")
- }
-
- var databaseId int64
- if model.Id.ValueInt64() != 0 {
- databaseId = model.Id.ValueInt64()
- } else if source.Id != nil {
- databaseId = *source.Id
- } else {
- return fmt.Errorf("database id not present")
- }
-
- model.Id = types.Int64Value(databaseId)
- model.DatabaseName = types.StringValue(source.GetName())
- model.Name = types.StringValue(source.GetName())
- model.Owner = types.StringValue(strings.Trim(source.GetOwner(), "\""))
- model.Region = types.StringValue(region)
- model.ProjectId = types.StringValue(model.ProjectId.ValueString())
- model.InstanceId = types.StringValue(model.InstanceId.ValueString())
-
- model.Compatibility = types.Int64Value(source.GetCompatibilityLevel())
- model.CompatibilityLevel = types.Int64Value(source.GetCompatibilityLevel())
-
- model.Collation = types.StringValue(source.GetCollationName()) // it does not come back from api
- model.CollationName = types.StringValue(source.GetCollationName())
-
- return nil
-}
-
-// toCreatePayload converts the resource model to an API create payload.
-func toCreatePayload(model *resourceModel) (*sqlserverflexalpha.CreateDatabaseRequestPayload, error) {
- if model == nil {
- return nil, fmt.Errorf("nil model")
- }
-
- return &sqlserverflexalpha.CreateDatabaseRequestPayload{
- Name: model.Name.ValueStringPointer(),
- Owner: model.Owner.ValueStringPointer(),
- Collation: model.Collation.ValueStringPointer(),
- Compatibility: model.Compatibility.ValueInt64Pointer(),
- }, nil
-}
diff --git a/stackit/internal/services/sqlserverflexalpha/database/mapper_test.go b/stackit/internal/services/sqlserverflexalpha/database/mapper_test.go
deleted file mode 100644
index b0daa742..00000000
--- a/stackit/internal/services/sqlserverflexalpha/database/mapper_test.go
+++ /dev/null
@@ -1,232 +0,0 @@
-package sqlserverflexalpha
-
-import (
- "testing"
-
- "github.com/google/go-cmp/cmp"
- "github.com/hashicorp/terraform-plugin-framework/types"
- "github.com/stackitcloud/stackit-sdk-go/core/utils"
-
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha"
- datasource "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/database/datasources_gen"
-)
-
-func TestMapFields(t *testing.T) {
- type given struct {
- source *sqlserverflexalpha.GetDatabaseResponse
- model *dataSourceModel
- region string
- }
- type expected struct {
- model *dataSourceModel
- err bool
- }
-
- testcases := []struct {
- name string
- given given
- expected expected
- }{
- {
- name: "should map fields correctly",
- given: given{
- source: &sqlserverflexalpha.GetDatabaseResponse{
- Id: utils.Ptr(int64(1)),
- Name: utils.Ptr("my-db"),
- CollationName: utils.Ptr("collation"),
- CompatibilityLevel: utils.Ptr(int64(150)),
- Owner: utils.Ptr("my-owner"),
- },
- model: &dataSourceModel{
- DatabaseModel: datasource.DatabaseModel{
- ProjectId: types.StringValue("my-project"),
- InstanceId: types.StringValue("my-instance"),
- },
- },
- region: "eu01",
- },
- expected: expected{
- model: &dataSourceModel{
- DatabaseModel: datasource.DatabaseModel{
- Id: types.Int64Value(1),
- Name: types.StringValue("my-db"),
- DatabaseName: types.StringValue("my-db"),
- Owner: types.StringValue("my-owner"),
- Region: types.StringValue("eu01"),
- InstanceId: types.StringValue("my-instance"),
- ProjectId: types.StringValue("my-project"),
- CompatibilityLevel: types.Int64Value(150),
- CollationName: types.StringValue("collation"),
- },
- TerraformId: types.StringValue("my-project,eu01,my-instance,my-db"),
- },
- },
- },
- {
- name: "should fail on nil source",
- given: given{
- source: nil,
- model: &dataSourceModel{},
- },
- expected: expected{err: true},
- },
- {
- name: "should fail on nil source ID",
- given: given{
- source: &sqlserverflexalpha.GetDatabaseResponse{Id: nil},
- model: &dataSourceModel{},
- },
- expected: expected{err: true},
- },
- {
- name: "should fail on nil model",
- given: given{
- source: &sqlserverflexalpha.GetDatabaseResponse{Id: utils.Ptr(int64(1))},
- model: nil,
- },
- expected: expected{err: true},
- },
- }
-
- for _, tc := range testcases {
- t.Run(
- tc.name, func(t *testing.T) {
- err := mapFields(tc.given.source, tc.given.model, tc.given.region)
- if (err != nil) != tc.expected.err {
- t.Fatalf("expected error: %v, got: %v", tc.expected.err, err)
- }
- if err == nil {
- if diff := cmp.Diff(tc.expected.model, tc.given.model); diff != "" {
- t.Errorf("model mismatch (-want +got):\n%s", diff)
- }
- }
- },
- )
- }
-}
-
-func TestMapResourceFields(t *testing.T) {
- type given struct {
- source *sqlserverflexalpha.GetDatabaseResponse
- model *resourceModel
- region string
- }
- type expected struct {
- model *resourceModel
- err bool
- }
-
- testcases := []struct {
- name string
- given given
- expected expected
- }{
- {
- name: "should map fields correctly",
- given: given{
- source: &sqlserverflexalpha.GetDatabaseResponse{
- Id: utils.Ptr(int64(1)),
- Name: utils.Ptr("my-db"),
- Owner: utils.Ptr("my-owner"),
- },
- model: &resourceModel{
- ProjectId: types.StringValue("my-project"),
- InstanceId: types.StringValue("my-instance"),
- },
- region: "eu01",
- },
- expected: expected{
- model: &resourceModel{
- Id: types.Int64Value(1),
- Name: types.StringValue("my-db"),
- Compatibility: types.Int64Value(0),
- CompatibilityLevel: types.Int64Value(0),
- Collation: types.StringValue(""),
- CollationName: types.StringValue(""),
- DatabaseName: types.StringValue("my-db"),
- InstanceId: types.StringValue("my-instance"),
- ProjectId: types.StringValue("my-project"),
- Region: types.StringValue("eu01"),
- Owner: types.StringValue("my-owner"),
- },
- },
- },
- {
- name: "should fail on nil source",
- given: given{
- source: nil,
- model: &resourceModel{},
- },
- expected: expected{err: true},
- },
- }
-
- for _, tc := range testcases {
- t.Run(
- tc.name, func(t *testing.T) {
- err := mapResourceFields(tc.given.source, tc.given.model, tc.given.region)
- if (err != nil) != tc.expected.err {
- t.Fatalf("expected error: %v, got: %v", tc.expected.err, err)
- }
- if err == nil {
- if diff := cmp.Diff(tc.expected.model, tc.given.model); diff != "" {
- t.Errorf("model mismatch (-want +got):\n%s", diff)
- }
- }
- },
- )
- }
-}
-
-func TestToCreatePayload(t *testing.T) {
- type given struct {
- model *resourceModel
- }
- type expected struct {
- payload *sqlserverflexalpha.CreateDatabaseRequestPayload
- err bool
- }
-
- testcases := []struct {
- name string
- given given
- expected expected
- }{
- {
- name: "should convert model to payload",
- given: given{
- model: &resourceModel{
- Name: types.StringValue("my-db"),
- Owner: types.StringValue("my-owner"),
- },
- },
- expected: expected{
- payload: &sqlserverflexalpha.CreateDatabaseRequestPayload{
- Name: utils.Ptr("my-db"),
- Owner: utils.Ptr("my-owner"),
- },
- },
- },
- {
- name: "should fail on nil model",
- given: given{model: nil},
- expected: expected{err: true},
- },
- }
-
- for _, tc := range testcases {
- t.Run(
- tc.name, func(t *testing.T) {
- actual, err := toCreatePayload(tc.given.model)
- if (err != nil) != tc.expected.err {
- t.Fatalf("expected error: %v, got: %v", tc.expected.err, err)
- }
- if err == nil {
- if diff := cmp.Diff(tc.expected.payload, actual); diff != "" {
- t.Errorf("payload mismatch (-want +got):\n%s", diff)
- }
- }
- },
- )
- }
-}
diff --git a/stackit/internal/services/sqlserverflexalpha/database/planModifiers.yaml b/stackit/internal/services/sqlserverflexalpha/database/planModifiers.yaml
deleted file mode 100644
index 1d010ed7..00000000
--- a/stackit/internal/services/sqlserverflexalpha/database/planModifiers.yaml
+++ /dev/null
@@ -1,51 +0,0 @@
-fields:
- - name: 'id'
- modifiers:
- - 'UseStateForUnknown'
-
- - name: 'instance_id'
- validators:
- - validate.NoSeparator
- - validate.UUID
- modifiers:
- - 'RequiresReplace'
-
- - name: 'project_id'
- validators:
- - validate.NoSeparator
- - validate.UUID
- modifiers:
- - 'RequiresReplace'
-
- - name: 'region'
- modifiers:
- - 'RequiresReplace'
-
- - name: 'name'
- modifiers:
- - 'RequiresReplace'
-
- - name: 'collation'
- modifiers:
- - 'RequiresReplace'
-
- - name: 'owner'
- modifiers:
- - 'UseStateForUnknown'
-
- - name: 'database_name'
- modifiers:
- - 'UseStateForUnknown'
-
- - name: 'collation_name'
- modifiers:
- - 'UseStateForUnknown'
-
- - name: 'compatibility'
- modifiers:
- - 'UseStateForUnknown'
- - 'RequiresReplace'
-
- - name: 'compatibility_level'
- modifiers:
- - 'UseStateForUnknown'
diff --git a/stackit/internal/services/sqlserverflexalpha/database/resource.go b/stackit/internal/services/sqlserverflexalpha/database/resource.go
deleted file mode 100644
index 5b46c52c..00000000
--- a/stackit/internal/services/sqlserverflexalpha/database/resource.go
+++ /dev/null
@@ -1,539 +0,0 @@
-package sqlserverflexalpha
-
-import (
- "context"
- _ "embed"
- "errors"
- "fmt"
- "net/http"
- "strings"
- "time"
-
- "github.com/hashicorp/terraform-plugin-framework/path"
- "github.com/hashicorp/terraform-plugin-framework/resource"
- "github.com/hashicorp/terraform-plugin-framework/resource/identityschema"
- "github.com/hashicorp/terraform-plugin-framework/types"
- "github.com/hashicorp/terraform-plugin-log/tflog"
- "github.com/stackitcloud/stackit-sdk-go/core/config"
- "github.com/stackitcloud/stackit-sdk-go/core/oapierror"
-
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
- wait "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/wait/sqlserverflexalpha"
-
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
-
- sqlserverflexalphaResGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/database/resources_gen"
-)
-
-var (
- _ resource.Resource = &databaseResource{}
- _ resource.ResourceWithConfigure = &databaseResource{}
- _ resource.ResourceWithImportState = &databaseResource{}
- _ resource.ResourceWithModifyPlan = &databaseResource{}
- _ resource.ResourceWithIdentity = &databaseResource{}
-
- // Define errors
- errDatabaseNotFound = errors.New("database not found")
-)
-
-func NewDatabaseResource() resource.Resource {
- return &databaseResource{}
-}
-
-// resourceModel describes the resource data model.
-type resourceModel = sqlserverflexalphaResGen.DatabaseModel
-
-type databaseResource struct {
- client *sqlserverflexalpha.APIClient
- providerData core.ProviderData
-}
-
-type DatabaseResourceIdentityModel struct {
- ProjectID types.String `tfsdk:"project_id"`
- Region types.String `tfsdk:"region"`
- InstanceID types.String `tfsdk:"instance_id"`
- DatabaseName types.String `tfsdk:"database_name"`
-}
-
-func (r *databaseResource) Metadata(
- _ context.Context,
- req resource.MetadataRequest,
- resp *resource.MetadataResponse,
-) {
- resp.TypeName = req.ProviderTypeName + "_sqlserverflexalpha_database"
-}
-
-//go:embed planModifiers.yaml
-var modifiersFileByte []byte
-
-func (r *databaseResource) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) {
- s := sqlserverflexalphaResGen.DatabaseResourceSchema(ctx)
-
- fields, err := utils.ReadModifiersConfig(modifiersFileByte)
- if err != nil {
- resp.Diagnostics.AddError("error during read modifiers config file", err.Error())
- return
- }
-
- err = utils.AddPlanModifiersToResourceSchema(fields, &s)
- if err != nil {
- resp.Diagnostics.AddError("error adding plan modifiers", err.Error())
- return
- }
- resp.Schema = s
-}
-
-func (r *databaseResource) IdentitySchema(
- _ context.Context,
- _ resource.IdentitySchemaRequest,
- resp *resource.IdentitySchemaResponse,
-) {
- resp.IdentitySchema = identityschema.Schema{
- Attributes: map[string]identityschema.Attribute{
- "project_id": identityschema.StringAttribute{
- RequiredForImport: true, // must be set during import by the practitioner
- },
- "region": identityschema.StringAttribute{
- RequiredForImport: true, // can be defaulted by the provider configuration
- },
- "instance_id": identityschema.StringAttribute{
- RequiredForImport: true, // can be defaulted by the provider configuration
- },
- "database_name": identityschema.StringAttribute{
- RequiredForImport: true, // can be defaulted by the provider configuration
- },
- },
- }
-}
-
-// Configure adds the provider configured client to the resource.
-func (r *databaseResource) Configure(
- ctx context.Context,
- req resource.ConfigureRequest,
- resp *resource.ConfigureResponse,
-) {
- var ok bool
- r.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics)
- if !ok {
- return
- }
-
- apiClientConfigOptions := []config.ConfigurationOption{
- config.WithCustomAuth(r.providerData.RoundTripper),
- utils.UserAgentConfigOption(r.providerData.Version),
- }
- if r.providerData.SQLServerFlexCustomEndpoint != "" {
- apiClientConfigOptions = append(
- apiClientConfigOptions,
- config.WithEndpoint(r.providerData.SQLServerFlexCustomEndpoint),
- )
- } else {
- apiClientConfigOptions = append(apiClientConfigOptions, config.WithRegion(r.providerData.GetRegion()))
- }
- apiClient, err := sqlserverflexalpha.NewAPIClient(apiClientConfigOptions...)
- if err != nil {
- resp.Diagnostics.AddError(
- "Error configuring API client",
- fmt.Sprintf(
- "Configuring client: %v. This is an error related to the provider configuration, not to the resource configuration",
- err,
- ),
- )
- return
- }
- r.client = apiClient
- tflog.Info(ctx, "sqlserverflexalpha.Database client configured")
-}
-
-func (r *databaseResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) {
- var data resourceModel
- createErr := "DB create error"
-
- // Read Terraform plan data into the model
- resp.Diagnostics.Append(req.Plan.Get(ctx, &data)...)
-
- if resp.Diagnostics.HasError() {
- return
- }
-
- ctx = core.InitProviderContext(ctx)
-
- projectId := data.ProjectId.ValueString()
- region := data.Region.ValueString()
- instanceId := data.InstanceId.ValueString()
- ctx = tflog.SetField(ctx, "project_id", projectId)
- ctx = tflog.SetField(ctx, "region", region)
- ctx = tflog.SetField(ctx, "instance_id", instanceId)
-
- databaseName := data.Name.ValueString()
- ctx = tflog.SetField(ctx, "database_name", databaseName)
-
- payLoad := sqlserverflexalpha.CreateDatabaseRequestPayload{}
- if !data.Collation.IsNull() && !data.Collation.IsUnknown() {
- payLoad.Collation = data.Collation.ValueStringPointer()
- }
-
- if !data.Compatibility.IsNull() && !data.Compatibility.IsUnknown() {
- payLoad.Compatibility = data.Compatibility.ValueInt64Pointer()
- }
-
- payLoad.Name = data.Name.ValueStringPointer()
- payLoad.Owner = data.Owner.ValueStringPointer()
-
- createResp, err := r.client.CreateDatabaseRequest(ctx, projectId, region, instanceId).
- CreateDatabaseRequestPayload(payLoad).
- Execute()
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- createErr,
- fmt.Sprintf("Calling API: %v", err),
- )
- return
- }
-
- if createResp == nil || createResp.Id == nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "Error creating database",
- "API didn't return database Id. A database might have been created",
- )
- return
- }
-
- databaseId := *createResp.Id
-
- ctx = tflog.SetField(ctx, "database_id", databaseId)
-
- ctx = core.LogResponse(ctx)
-
- // Set data returned by API in identity
- identity := DatabaseResourceIdentityModel{
- ProjectID: types.StringValue(projectId),
- Region: types.StringValue(region),
- InstanceID: types.StringValue(instanceId),
- DatabaseName: types.StringValue(databaseName),
- }
- resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- // TODO: is this necessary to wait for the database-> API say 200 ?
- waitResp, err := wait.CreateDatabaseWaitHandler(
- ctx,
- r.client,
- projectId,
- instanceId,
- region,
- databaseName,
- ).SetSleepBeforeWait(
- 30 * time.Second,
- ).SetTimeout(
- 15 * time.Minute,
- ).WaitWithContext(ctx)
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- createErr,
- fmt.Sprintf("Database creation waiting: %v", err),
- )
- return
- }
-
- if waitResp.Id == nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- createErr,
- "Database creation waiting: returned id is nil",
- )
- return
- }
-
- if *waitResp.Id != databaseId {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- createErr,
- "Database creation waiting: returned id is different",
- )
- return
- }
-
- if *waitResp.Owner != data.Owner.ValueString() {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- createErr,
- "Database creation waiting: returned owner is different",
- )
- return
- }
-
- if *waitResp.Name != data.Name.ValueString() {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- createErr,
- "Database creation waiting: returned name is different",
- )
- return
- }
-
- database, err := r.client.GetDatabaseRequest(ctx, projectId, region, instanceId, databaseName).Execute()
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "Error creating database",
- fmt.Sprintf("Getting database details after creation: %v", err),
- )
- return
- }
-
- // Map response body to schema
- err = mapResourceFields(database, &data, region)
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "Error creating database",
- fmt.Sprintf("Processing API payload: %v", err),
- )
- return
- }
-
- // Set state to fully populated data
- resp.Diagnostics.Append(resp.State.Set(ctx, data)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- // Save data into Terraform state
-
- tflog.Info(ctx, "sqlserverflexalpha.Database created")
-}
-
-func (r *databaseResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) {
- var model resourceModel
- diags := req.State.Get(ctx, &model)
- resp.Diagnostics.Append(diags...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- ctx = core.InitProviderContext(ctx)
-
- projectId := model.ProjectId.ValueString()
- region := model.Region.ValueString()
- instanceId := model.InstanceId.ValueString()
- databaseName := model.DatabaseName.ValueString()
-
- ctx = tflog.SetField(ctx, "project_id", projectId)
- ctx = tflog.SetField(ctx, "instance_id", instanceId)
- ctx = tflog.SetField(ctx, "region", region)
- ctx = tflog.SetField(ctx, "database_name", databaseName)
-
- databaseResp, err := r.client.GetDatabaseRequest(ctx, projectId, region, instanceId, databaseName).Execute()
- if err != nil {
- oapiErr, ok := err.(*oapierror.GenericOpenAPIError) //nolint:errorlint //complaining that error.As should be used to catch wrapped errors, but this error should not be wrapped
- if (ok && oapiErr.StatusCode == http.StatusNotFound) || errors.Is(err, errDatabaseNotFound) {
- resp.State.RemoveResource(ctx)
- return
- }
- core.LogAndAddError(ctx, &resp.Diagnostics, "Error reading database", fmt.Sprintf("Calling API: %v", err))
- return
- }
-
- ctx = core.LogResponse(ctx)
-
- // Map response body to schema
- err = mapResourceFields(databaseResp, &model, region)
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "Error reading database",
- fmt.Sprintf("Processing API payload: %v", err),
- )
- return
- }
-
- // Save identity into Terraform state
- identity := DatabaseResourceIdentityModel{
- ProjectID: types.StringValue(projectId),
- Region: types.StringValue(region),
- InstanceID: types.StringValue(instanceId),
- DatabaseName: types.StringValue(databaseName),
- }
- resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- // Set refreshed state
- resp.Diagnostics.Append(resp.State.Set(ctx, &model)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- tflog.Info(ctx, "sqlserverflexalpha.Database read")
-}
-
-func (r *databaseResource) Update(ctx context.Context, _ resource.UpdateRequest, resp *resource.UpdateResponse) {
- // TODO: Check update api endpoint - not available at the moment, so return an error for now
- core.LogAndAddError(ctx, &resp.Diagnostics, "Error updating database", "Database can't be updated")
-}
-
-func (r *databaseResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) {
- // nolint:gocritic // function signature required by Terraform
- var model resourceModel
- diags := req.State.Get(ctx, &model)
- resp.Diagnostics.Append(diags...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- // Read identity data
- var identityData DatabaseResourceIdentityModel
- resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- ctx = core.InitProviderContext(ctx)
-
- projectId := model.ProjectId.ValueString()
- region := model.Region.ValueString()
- instanceId := model.InstanceId.ValueString()
- databaseName := model.DatabaseName.ValueString()
-
- ctx = tflog.SetField(ctx, "project_id", projectId)
- ctx = tflog.SetField(ctx, "instance_id", instanceId)
- ctx = tflog.SetField(ctx, "region", region)
- ctx = tflog.SetField(ctx, "database_name", databaseName)
-
- // Delete existing record set
- err := r.client.DeleteDatabaseRequestExecute(ctx, projectId, region, instanceId, databaseName)
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "Error deleting database",
- fmt.Sprintf(
- "Calling API: %v\nname: %s, region: %s, instanceId: %s", err, databaseName, region, instanceId,
- ),
- )
- return
- }
-
- ctx = core.LogResponse(ctx)
- resp.State.RemoveResource(ctx)
-
- tflog.Info(ctx, "sqlserverflexalpha.Database deleted")
-}
-
-// ModifyPlan implements resource.ResourceWithModifyPlan.
-// Use the modifier to set the effective region in the current plan.
-func (r *databaseResource) ModifyPlan(
- ctx context.Context,
- req resource.ModifyPlanRequest,
- resp *resource.ModifyPlanResponse,
-) { // nolint:gocritic // function signature required by Terraform
- // skip initial empty configuration to avoid follow-up errors
- if req.Config.Raw.IsNull() {
- return
- }
-
- var configModel resourceModel
- resp.Diagnostics.Append(req.Config.Get(ctx, &configModel)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- var planModel resourceModel
- resp.Diagnostics.Append(req.Plan.Get(ctx, &planModel)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- utils.AdaptRegion(ctx, configModel.Region, &planModel.Region, r.providerData.GetRegion(), resp)
- if resp.Diagnostics.HasError() {
- return
- }
-
- resp.Diagnostics.Append(resp.Plan.Set(ctx, planModel)...)
- if resp.Diagnostics.HasError() {
- return
- }
-}
-
-// ImportState imports a resource into the Terraform state on success.
-// The expected format of the resource import identifier is: project_id,zone_id,record_set_id
-func (r *databaseResource) ImportState(
- ctx context.Context,
- req resource.ImportStateRequest,
- resp *resource.ImportStateResponse,
-) {
- ctx = core.InitProviderContext(ctx)
-
- if req.ID != "" {
- idParts := strings.Split(req.ID, core.Separator)
-
- if len(idParts) != 4 || idParts[0] == "" || idParts[1] == "" || idParts[2] == "" || idParts[3] == "" {
- core.LogAndAddError(
- ctx, &resp.Diagnostics,
- "Error importing database",
- fmt.Sprintf(
- "Expected import identifier with format [project_id],[region],[instance_id],[database_name] Got: %q",
- req.ID,
- ),
- )
- return
- }
-
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), idParts[0])...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), idParts[1])...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), idParts[2])...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("database_name"), idParts[3])...)
-
- var identityData DatabaseResourceIdentityModel
- identityData.ProjectID = types.StringValue(idParts[0])
- identityData.Region = types.StringValue(idParts[1])
- identityData.InstanceID = types.StringValue(idParts[2])
- identityData.DatabaseName = types.StringValue(idParts[3])
-
- resp.Diagnostics.Append(resp.Identity.Set(ctx, &identityData)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- tflog.Info(ctx, "sqlserverflexalpha database state imported")
- return
- }
-
- // If no ID is provided, attempt to read identity attributes from the import configuration
- var identityData DatabaseResourceIdentityModel
- resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- projectId := identityData.ProjectID.ValueString()
- region := identityData.Region.ValueString()
- instanceId := identityData.InstanceID.ValueString()
- databaseName := identityData.DatabaseName.ValueString()
-
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), projectId)...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), region)...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), instanceId)...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("database_name"), databaseName)...)
-
- tflog.Info(ctx, "sqlserverflexalpha database state imported")
-}
diff --git a/stackit/internal/services/sqlserverflexalpha/flavor/datasource.go b/stackit/internal/services/sqlserverflexalpha/flavor/datasource.go
deleted file mode 100644
index d56aafa5..00000000
--- a/stackit/internal/services/sqlserverflexalpha/flavor/datasource.go
+++ /dev/null
@@ -1,355 +0,0 @@
-package sqlserverflexalphaFlavor
-
-import (
- "context"
- "fmt"
-
- "github.com/hashicorp/terraform-plugin-framework/attr"
- "github.com/hashicorp/terraform-plugin-framework/datasource"
- "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/types"
- "github.com/hashicorp/terraform-plugin-framework/types/basetypes"
- "github.com/hashicorp/terraform-plugin-log/tflog"
- "github.com/stackitcloud/stackit-sdk-go/core/config"
-
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
-
- sqlserverflexalphaPkg "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha"
- sqlserverflexalphaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/flavor/datasources_gen"
-)
-
-// Ensure the implementation satisfies the expected interfaces.
-var (
- _ datasource.DataSource = &flavorDataSource{}
- _ datasource.DataSourceWithConfigure = &flavorDataSource{}
-)
-
-type FlavorModel struct {
- ProjectId types.String `tfsdk:"project_id"`
- Region types.String `tfsdk:"region"`
- StorageClass types.String `tfsdk:"storage_class"`
- Cpu types.Int64 `tfsdk:"cpu"`
- Description types.String `tfsdk:"description"`
- Id types.String `tfsdk:"id"`
- FlavorId types.String `tfsdk:"flavor_id"`
- MaxGb types.Int64 `tfsdk:"max_gb"`
- Memory types.Int64 `tfsdk:"ram"`
- MinGb types.Int64 `tfsdk:"min_gb"`
- NodeType types.String `tfsdk:"node_type"`
- StorageClasses types.List `tfsdk:"storage_classes"`
-}
-
-// NewFlavorDataSource is a helper function to simplify the provider implementation.
-func NewFlavorDataSource() datasource.DataSource {
- return &flavorDataSource{}
-}
-
-// flavorDataSource is the data source implementation.
-type flavorDataSource struct {
- client *sqlserverflexalphaPkg.APIClient
- providerData core.ProviderData
-}
-
-// Metadata returns the data source type name.
-func (r *flavorDataSource) Metadata(_ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
- resp.TypeName = req.ProviderTypeName + "_sqlserverflexalpha_flavor"
-}
-
-// Configure adds the provider configured client to the data source.
-func (r *flavorDataSource) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) {
- var ok bool
- r.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics)
- if !ok {
- return
- }
-
- apiClientConfigOptions := []config.ConfigurationOption{
- config.WithCustomAuth(r.providerData.RoundTripper),
- utils.UserAgentConfigOption(r.providerData.Version),
- }
- if r.providerData.SQLServerFlexCustomEndpoint != "" {
- apiClientConfigOptions = append(
- apiClientConfigOptions,
- config.WithEndpoint(r.providerData.SQLServerFlexCustomEndpoint),
- )
- } else {
- apiClientConfigOptions = append(
- apiClientConfigOptions,
- config.WithRegion(r.providerData.GetRegion()),
- )
- }
- apiClient, err := sqlserverflexalphaPkg.NewAPIClient(apiClientConfigOptions...)
- if err != nil {
- resp.Diagnostics.AddError(
- "Error configuring API client",
- fmt.Sprintf(
- "Configuring client: %v. This is an error related to the provider configuration, not to the resource configuration",
- err,
- ),
- )
- return
- }
- r.client = apiClient
- tflog.Info(ctx, "SQL Server Flex instance client configured")
-}
-
-func (r *flavorDataSource) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) {
- resp.Schema = schema.Schema{
- Attributes: map[string]schema.Attribute{
- "project_id": schema.StringAttribute{
- Required: true,
- Description: "The project ID of the flavor.",
- MarkdownDescription: "The project ID of the flavor.",
- },
- "region": schema.StringAttribute{
- Required: true,
- Description: "The region of the flavor.",
- MarkdownDescription: "The region of the flavor.",
- },
- "cpu": schema.Int64Attribute{
- Required: true,
- Description: "The cpu count of the instance.",
- MarkdownDescription: "The cpu count of the instance.",
- },
- "ram": schema.Int64Attribute{
- Required: true,
- Description: "The memory of the instance in Gibibyte.",
- MarkdownDescription: "The memory of the instance in Gibibyte.",
- },
- "storage_class": schema.StringAttribute{
- Required: true,
- Description: "The memory of the instance in Gibibyte.",
- MarkdownDescription: "The memory of the instance in Gibibyte.",
- },
- "node_type": schema.StringAttribute{
- Required: true,
- Description: "defines the nodeType it can be either single or HA",
- MarkdownDescription: "defines the nodeType it can be either single or HA",
- },
- "flavor_id": schema.StringAttribute{
- Computed: true,
- Description: "The id of the instance flavor.",
- MarkdownDescription: "The id of the instance flavor.",
- },
- "description": schema.StringAttribute{
- Computed: true,
- Description: "The flavor description.",
- MarkdownDescription: "The flavor description.",
- },
- "id": schema.StringAttribute{
- Computed: true,
- Description: "The id of the instance flavor.",
- MarkdownDescription: "The id of the instance flavor.",
- },
- "max_gb": schema.Int64Attribute{
- Computed: true,
- Description: "maximum storage which can be ordered for the flavor in Gigabyte.",
- MarkdownDescription: "maximum storage which can be ordered for the flavor in Gigabyte.",
- },
- "min_gb": schema.Int64Attribute{
- Computed: true,
- Description: "minimum storage which is required to order in Gigabyte.",
- MarkdownDescription: "minimum storage which is required to order in Gigabyte.",
- },
- "storage_classes": schema.ListNestedAttribute{
- NestedObject: schema.NestedAttributeObject{
- Attributes: map[string]schema.Attribute{
- "class": schema.StringAttribute{
- Computed: true,
- },
- "max_io_per_sec": schema.Int64Attribute{
- Computed: true,
- },
- "max_through_in_mb": schema.Int64Attribute{
- Computed: true,
- },
- },
- CustomType: sqlserverflexalphaGen.StorageClassesType{
- ObjectType: types.ObjectType{
- AttrTypes: sqlserverflexalphaGen.StorageClassesValue{}.AttributeTypes(ctx),
- },
- },
- },
- Computed: true,
- Description: "maximum storage which can be ordered for the flavor in Gigabyte.",
- MarkdownDescription: "maximum storage which can be ordered for the flavor in Gigabyte.",
- },
- },
- //Attributes: map[string]schema.Attribute{
- // "project_id": schema.StringAttribute{
- // Required: true,
- // Description: "The cpu count of the instance.",
- // MarkdownDescription: "The cpu count of the instance.",
- // },
- // "region": schema.StringAttribute{
- // Required: true,
- // Description: "The flavor description.",
- // MarkdownDescription: "The flavor description.",
- // },
- // "cpu": schema.Int64Attribute{
- // Required: true,
- // Description: "The cpu count of the instance.",
- // MarkdownDescription: "The cpu count of the instance.",
- // },
- // "ram": schema.Int64Attribute{
- // Required: true,
- // Description: "The memory of the instance in Gibibyte.",
- // MarkdownDescription: "The memory of the instance in Gibibyte.",
- // },
- // "storage_class": schema.StringAttribute{
- // Required: true,
- // Description: "The memory of the instance in Gibibyte.",
- // MarkdownDescription: "The memory of the instance in Gibibyte.",
- // },
- // "description": schema.StringAttribute{
- // Computed: true,
- // Description: "The flavor description.",
- // MarkdownDescription: "The flavor description.",
- // },
- // "id": schema.StringAttribute{
- // Computed: true,
- // Description: "The terraform id of the instance flavor.",
- // MarkdownDescription: "The terraform id of the instance flavor.",
- // },
- // "flavor_id": schema.StringAttribute{
- // Computed: true,
- // Description: "The flavor id of the instance flavor.",
- // MarkdownDescription: "The flavor id of the instance flavor.",
- // },
- // "max_gb": schema.Int64Attribute{
- // Computed: true,
- // Description: "maximum storage which can be ordered for the flavor in Gigabyte.",
- // MarkdownDescription: "maximum storage which can be ordered for the flavor in Gigabyte.",
- // },
- // "min_gb": schema.Int64Attribute{
- // Computed: true,
- // Description: "minimum storage which is required to order in Gigabyte.",
- // MarkdownDescription: "minimum storage which is required to order in Gigabyte.",
- // },
- // "node_type": schema.StringAttribute{
- // Required: true,
- // Description: "defines the nodeType it can be either single or replica",
- // MarkdownDescription: "defines the nodeType it can be either single or replica",
- // },
- // "storage_classes": schema.ListNestedAttribute{
- // Computed: true,
- // NestedObject: schema.NestedAttributeObject{
- // Attributes: map[string]schema.Attribute{
- // "class": schema.StringAttribute{
- // Computed: true,
- // },
- // "max_io_per_sec": schema.Int64Attribute{
- // Computed: true,
- // },
- // "max_through_in_mb": schema.Int64Attribute{
- // Computed: true,
- // },
- // },
- // CustomType: sqlserverflexalphaGen.StorageClassesType{
- // ObjectType: types.ObjectType{
- // AttrTypes: sqlserverflexalphaGen.StorageClassesValue{}.AttributeTypes(ctx),
- // },
- // },
- // },
- // },
- // },
- }
-}
-
-func (r *flavorDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
- var model FlavorModel
- diags := req.Config.Get(ctx, &model)
- resp.Diagnostics.Append(diags...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- ctx = core.InitProviderContext(ctx)
-
- projectId := model.ProjectId.ValueString()
- region := r.providerData.GetRegionWithOverride(model.Region)
- ctx = tflog.SetField(ctx, "project_id", projectId)
- ctx = tflog.SetField(ctx, "region", region)
-
- flavors, err := getAllFlavors(ctx, r.client, projectId, region)
- if err != nil {
- core.LogAndAddError(ctx, &resp.Diagnostics, "Error reading flavors", fmt.Sprintf("getAllFlavors: %v", err))
- return
- }
-
- var foundFlavors []sqlserverflexalphaPkg.ListFlavors
- for _, flavor := range flavors {
- if model.Cpu.ValueInt64() != *flavor.Cpu {
- continue
- }
- if model.Memory.ValueInt64() != *flavor.Memory {
- continue
- }
- if model.NodeType.ValueString() != *flavor.NodeType {
- continue
- }
- for _, sc := range *flavor.StorageClasses {
- if model.StorageClass.ValueString() != *sc.Class {
- continue
- }
- foundFlavors = append(foundFlavors, flavor)
- }
- }
- if len(foundFlavors) == 0 {
- resp.Diagnostics.AddError("get flavor", "could not find requested flavor")
- return
- }
- if len(foundFlavors) > 1 {
- resp.Diagnostics.AddError("get flavor", "found too many matching flavors")
- return
- }
-
- f := foundFlavors[0]
- model.Description = types.StringValue(*f.Description)
- model.Id = utils.BuildInternalTerraformId(model.ProjectId.ValueString(), region, *f.Id)
- model.FlavorId = types.StringValue(*f.Id)
- model.MaxGb = types.Int64Value(*f.MaxGB)
- model.MinGb = types.Int64Value(*f.MinGB)
-
- if f.StorageClasses == nil {
- model.StorageClasses = types.ListNull(sqlserverflexalphaGen.StorageClassesType{
- ObjectType: basetypes.ObjectType{
- AttrTypes: sqlserverflexalphaGen.StorageClassesValue{}.AttributeTypes(ctx),
- },
- })
- } else {
- var scList []attr.Value
- for _, sc := range *f.StorageClasses {
- scList = append(
- scList,
- sqlserverflexalphaGen.NewStorageClassesValueMust(
- sqlserverflexalphaGen.StorageClassesValue{}.AttributeTypes(ctx),
- map[string]attr.Value{
- "class": types.StringValue(*sc.Class),
- "max_io_per_sec": types.Int64Value(*sc.MaxIoPerSec),
- "max_through_in_mb": types.Int64Value(*sc.MaxThroughInMb),
- },
- ),
- )
- }
- storageClassesList := types.ListValueMust(
- sqlserverflexalphaGen.StorageClassesType{
- ObjectType: basetypes.ObjectType{
- AttrTypes: sqlserverflexalphaGen.StorageClassesValue{}.AttributeTypes(ctx),
- },
- },
- scList,
- )
- model.StorageClasses = storageClassesList
- }
-
- // Set refreshed state
- diags = resp.State.Set(ctx, model)
- resp.Diagnostics.Append(diags...)
- if resp.Diagnostics.HasError() {
- return
- }
- tflog.Info(ctx, "SQL Server Flex flavors read")
-}
diff --git a/stackit/internal/services/sqlserverflexalpha/flavor/datasources_gen/flavor_data_source_gen.go b/stackit/internal/services/sqlserverflexalpha/flavor/datasources_gen/flavor_data_source_gen.go
deleted file mode 100644
index d8654cf4..00000000
--- a/stackit/internal/services/sqlserverflexalpha/flavor/datasources_gen/flavor_data_source_gen.go
+++ /dev/null
@@ -1,1909 +0,0 @@
-// Code generated by terraform-plugin-framework-generator DO NOT EDIT.
-
-package sqlserverflexalpha
-
-import (
- "context"
- "fmt"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
- "github.com/hashicorp/terraform-plugin-framework/attr"
- "github.com/hashicorp/terraform-plugin-framework/diag"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
- "github.com/hashicorp/terraform-plugin-framework/types"
- "github.com/hashicorp/terraform-plugin-framework/types/basetypes"
- "github.com/hashicorp/terraform-plugin-go/tftypes"
- "strings"
-
- "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
-)
-
-func FlavorDataSourceSchema(ctx context.Context) schema.Schema {
- return schema.Schema{
- Attributes: map[string]schema.Attribute{
- "flavors": schema.ListNestedAttribute{
- NestedObject: schema.NestedAttributeObject{
- Attributes: map[string]schema.Attribute{
- "cpu": schema.Int64Attribute{
- Computed: true,
- Description: "The cpu count of the instance.",
- MarkdownDescription: "The cpu count of the instance.",
- },
- "description": schema.StringAttribute{
- Computed: true,
- Description: "The flavor description.",
- MarkdownDescription: "The flavor description.",
- },
- "id": schema.StringAttribute{
- Computed: true,
- Description: "The id of the instance flavor.",
- MarkdownDescription: "The id of the instance flavor.",
- },
- "max_gb": schema.Int64Attribute{
- Computed: true,
- Description: "maximum storage which can be ordered for the flavor in Gigabyte.",
- MarkdownDescription: "maximum storage which can be ordered for the flavor in Gigabyte.",
- },
- "memory": schema.Int64Attribute{
- Computed: true,
- Description: "The memory of the instance in Gibibyte.",
- MarkdownDescription: "The memory of the instance in Gibibyte.",
- },
- "min_gb": schema.Int64Attribute{
- Computed: true,
- Description: "minimum storage which is required to order in Gigabyte.",
- MarkdownDescription: "minimum storage which is required to order in Gigabyte.",
- },
- "node_type": schema.StringAttribute{
- Computed: true,
- Description: "defines the nodeType it can be either single or HA",
- MarkdownDescription: "defines the nodeType it can be either single or HA",
- },
- "storage_classes": schema.ListNestedAttribute{
- NestedObject: schema.NestedAttributeObject{
- Attributes: map[string]schema.Attribute{
- "class": schema.StringAttribute{
- Computed: true,
- },
- "max_io_per_sec": schema.Int64Attribute{
- Computed: true,
- },
- "max_through_in_mb": schema.Int64Attribute{
- Computed: true,
- },
- },
- CustomType: StorageClassesType{
- ObjectType: types.ObjectType{
- AttrTypes: StorageClassesValue{}.AttributeTypes(ctx),
- },
- },
- },
- Computed: true,
- Description: "maximum storage which can be ordered for the flavor in Gigabyte.",
- MarkdownDescription: "maximum storage which can be ordered for the flavor in Gigabyte.",
- },
- },
- CustomType: FlavorsType{
- ObjectType: types.ObjectType{
- AttrTypes: FlavorsValue{}.AttributeTypes(ctx),
- },
- },
- },
- Computed: true,
- Description: "List of flavors available for the project.",
- MarkdownDescription: "List of flavors available for the project.",
- },
- "page": schema.Int64Attribute{
- Optional: true,
- Computed: true,
- Description: "Number of the page of items list to be returned.",
- MarkdownDescription: "Number of the page of items list to be returned.",
- },
- "pagination": schema.SingleNestedAttribute{
- Attributes: map[string]schema.Attribute{
- "page": schema.Int64Attribute{
- Computed: true,
- },
- "size": schema.Int64Attribute{
- Computed: true,
- },
- "sort": schema.StringAttribute{
- Computed: true,
- },
- "total_pages": schema.Int64Attribute{
- Computed: true,
- },
- "total_rows": schema.Int64Attribute{
- Computed: true,
- },
- },
- CustomType: PaginationType{
- ObjectType: types.ObjectType{
- AttrTypes: PaginationValue{}.AttributeTypes(ctx),
- },
- },
- Computed: true,
- },
- "project_id": schema.StringAttribute{
- Required: true,
- Description: "The STACKIT project ID.",
- MarkdownDescription: "The STACKIT project ID.",
- },
- "region": schema.StringAttribute{
- Required: true,
- Description: "The region which should be addressed",
- MarkdownDescription: "The region which should be addressed",
- Validators: []validator.String{
- stringvalidator.OneOf(
- "eu01",
- ),
- },
- },
- "size": schema.Int64Attribute{
- Optional: true,
- Computed: true,
- Description: "Number of items to be returned on each page.",
- MarkdownDescription: "Number of items to be returned on each page.",
- },
- "sort": schema.StringAttribute{
- Optional: true,
- Computed: true,
- Description: "Sorting of the flavors to be returned on each page.",
- MarkdownDescription: "Sorting of the flavors to be returned on each page.",
- Validators: []validator.String{
- stringvalidator.OneOf(
- "index.desc",
- "index.asc",
- "cpu.desc",
- "cpu.asc",
- "flavor_description.asc",
- "flavor_description.desc",
- "id.desc",
- "id.asc",
- "size_max.desc",
- "size_max.asc",
- "ram.desc",
- "ram.asc",
- "size_min.desc",
- "size_min.asc",
- "storage_class.asc",
- "storage_class.desc",
- "node_type.asc",
- "node_type.desc",
- ),
- },
- },
- },
- }
-}
-
-type FlavorModel struct {
- Flavors types.List `tfsdk:"flavors"`
- Page types.Int64 `tfsdk:"page"`
- Pagination PaginationValue `tfsdk:"pagination"`
- ProjectId types.String `tfsdk:"project_id"`
- Region types.String `tfsdk:"region"`
- Size types.Int64 `tfsdk:"size"`
- Sort types.String `tfsdk:"sort"`
-}
-
-var _ basetypes.ObjectTypable = FlavorsType{}
-
-type FlavorsType struct {
- basetypes.ObjectType
-}
-
-func (t FlavorsType) Equal(o attr.Type) bool {
- other, ok := o.(FlavorsType)
-
- if !ok {
- return false
- }
-
- return t.ObjectType.Equal(other.ObjectType)
-}
-
-func (t FlavorsType) String() string {
- return "FlavorsType"
-}
-
-func (t FlavorsType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- attributes := in.Attributes()
-
- cpuAttribute, ok := attributes["cpu"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `cpu is missing from object`)
-
- return nil, diags
- }
-
- cpuVal, ok := cpuAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`cpu expected to be basetypes.Int64Value, was: %T`, cpuAttribute))
- }
-
- descriptionAttribute, ok := attributes["description"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `description is missing from object`)
-
- return nil, diags
- }
-
- descriptionVal, ok := descriptionAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`description expected to be basetypes.StringValue, was: %T`, descriptionAttribute))
- }
-
- idAttribute, ok := attributes["id"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `id is missing from object`)
-
- return nil, diags
- }
-
- idVal, ok := idAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`id expected to be basetypes.StringValue, was: %T`, idAttribute))
- }
-
- maxGbAttribute, ok := attributes["max_gb"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `max_gb is missing from object`)
-
- return nil, diags
- }
-
- maxGbVal, ok := maxGbAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`max_gb expected to be basetypes.Int64Value, was: %T`, maxGbAttribute))
- }
-
- memoryAttribute, ok := attributes["memory"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `memory is missing from object`)
-
- return nil, diags
- }
-
- memoryVal, ok := memoryAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`memory expected to be basetypes.Int64Value, was: %T`, memoryAttribute))
- }
-
- minGbAttribute, ok := attributes["min_gb"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `min_gb is missing from object`)
-
- return nil, diags
- }
-
- minGbVal, ok := minGbAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`min_gb expected to be basetypes.Int64Value, was: %T`, minGbAttribute))
- }
-
- nodeTypeAttribute, ok := attributes["node_type"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `node_type is missing from object`)
-
- return nil, diags
- }
-
- nodeTypeVal, ok := nodeTypeAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`node_type expected to be basetypes.StringValue, was: %T`, nodeTypeAttribute))
- }
-
- storageClassesAttribute, ok := attributes["storage_classes"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `storage_classes is missing from object`)
-
- return nil, diags
- }
-
- storageClassesVal, ok := storageClassesAttribute.(basetypes.ListValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`storage_classes expected to be basetypes.ListValue, was: %T`, storageClassesAttribute))
- }
-
- if diags.HasError() {
- return nil, diags
- }
-
- return FlavorsValue{
- Cpu: cpuVal,
- Description: descriptionVal,
- Id: idVal,
- MaxGb: maxGbVal,
- Memory: memoryVal,
- MinGb: minGbVal,
- NodeType: nodeTypeVal,
- StorageClasses: storageClassesVal,
- state: attr.ValueStateKnown,
- }, diags
-}
-
-func NewFlavorsValueNull() FlavorsValue {
- return FlavorsValue{
- state: attr.ValueStateNull,
- }
-}
-
-func NewFlavorsValueUnknown() FlavorsValue {
- return FlavorsValue{
- state: attr.ValueStateUnknown,
- }
-}
-
-func NewFlavorsValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (FlavorsValue, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521
- ctx := context.Background()
-
- for name, attributeType := range attributeTypes {
- attribute, ok := attributes[name]
-
- if !ok {
- diags.AddError(
- "Missing FlavorsValue Attribute Value",
- "While creating a FlavorsValue value, a missing attribute value was detected. "+
- "A FlavorsValue must contain values for all attributes, even if null or unknown. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("FlavorsValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()),
- )
-
- continue
- }
-
- if !attributeType.Equal(attribute.Type(ctx)) {
- diags.AddError(
- "Invalid FlavorsValue Attribute Type",
- "While creating a FlavorsValue value, an invalid attribute value was detected. "+
- "A FlavorsValue must use a matching attribute type for the value. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("FlavorsValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+
- fmt.Sprintf("FlavorsValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)),
- )
- }
- }
-
- for name := range attributes {
- _, ok := attributeTypes[name]
-
- if !ok {
- diags.AddError(
- "Extra FlavorsValue Attribute Value",
- "While creating a FlavorsValue value, an extra attribute value was detected. "+
- "A FlavorsValue must not contain values beyond the expected attribute types. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("Extra FlavorsValue Attribute Name: %s", name),
- )
- }
- }
-
- if diags.HasError() {
- return NewFlavorsValueUnknown(), diags
- }
-
- cpuAttribute, ok := attributes["cpu"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `cpu is missing from object`)
-
- return NewFlavorsValueUnknown(), diags
- }
-
- cpuVal, ok := cpuAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`cpu expected to be basetypes.Int64Value, was: %T`, cpuAttribute))
- }
-
- descriptionAttribute, ok := attributes["description"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `description is missing from object`)
-
- return NewFlavorsValueUnknown(), diags
- }
-
- descriptionVal, ok := descriptionAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`description expected to be basetypes.StringValue, was: %T`, descriptionAttribute))
- }
-
- idAttribute, ok := attributes["id"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `id is missing from object`)
-
- return NewFlavorsValueUnknown(), diags
- }
-
- idVal, ok := idAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`id expected to be basetypes.StringValue, was: %T`, idAttribute))
- }
-
- maxGbAttribute, ok := attributes["max_gb"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `max_gb is missing from object`)
-
- return NewFlavorsValueUnknown(), diags
- }
-
- maxGbVal, ok := maxGbAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`max_gb expected to be basetypes.Int64Value, was: %T`, maxGbAttribute))
- }
-
- memoryAttribute, ok := attributes["memory"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `memory is missing from object`)
-
- return NewFlavorsValueUnknown(), diags
- }
-
- memoryVal, ok := memoryAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`memory expected to be basetypes.Int64Value, was: %T`, memoryAttribute))
- }
-
- minGbAttribute, ok := attributes["min_gb"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `min_gb is missing from object`)
-
- return NewFlavorsValueUnknown(), diags
- }
-
- minGbVal, ok := minGbAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`min_gb expected to be basetypes.Int64Value, was: %T`, minGbAttribute))
- }
-
- nodeTypeAttribute, ok := attributes["node_type"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `node_type is missing from object`)
-
- return NewFlavorsValueUnknown(), diags
- }
-
- nodeTypeVal, ok := nodeTypeAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`node_type expected to be basetypes.StringValue, was: %T`, nodeTypeAttribute))
- }
-
- storageClassesAttribute, ok := attributes["storage_classes"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `storage_classes is missing from object`)
-
- return NewFlavorsValueUnknown(), diags
- }
-
- storageClassesVal, ok := storageClassesAttribute.(basetypes.ListValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`storage_classes expected to be basetypes.ListValue, was: %T`, storageClassesAttribute))
- }
-
- if diags.HasError() {
- return NewFlavorsValueUnknown(), diags
- }
-
- return FlavorsValue{
- Cpu: cpuVal,
- Description: descriptionVal,
- Id: idVal,
- MaxGb: maxGbVal,
- Memory: memoryVal,
- MinGb: minGbVal,
- NodeType: nodeTypeVal,
- StorageClasses: storageClassesVal,
- state: attr.ValueStateKnown,
- }, diags
-}
-
-func NewFlavorsValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) FlavorsValue {
- object, diags := NewFlavorsValue(attributeTypes, attributes)
-
- if diags.HasError() {
- // This could potentially be added to the diag package.
- diagsStrings := make([]string, 0, len(diags))
-
- for _, diagnostic := range diags {
- diagsStrings = append(diagsStrings, fmt.Sprintf(
- "%s | %s | %s",
- diagnostic.Severity(),
- diagnostic.Summary(),
- diagnostic.Detail()))
- }
-
- panic("NewFlavorsValueMust received error(s): " + strings.Join(diagsStrings, "\n"))
- }
-
- return object
-}
-
-func (t FlavorsType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) {
- if in.Type() == nil {
- return NewFlavorsValueNull(), nil
- }
-
- if !in.Type().Equal(t.TerraformType(ctx)) {
- return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type())
- }
-
- if !in.IsKnown() {
- return NewFlavorsValueUnknown(), nil
- }
-
- if in.IsNull() {
- return NewFlavorsValueNull(), nil
- }
-
- attributes := map[string]attr.Value{}
-
- val := map[string]tftypes.Value{}
-
- err := in.As(&val)
-
- if err != nil {
- return nil, err
- }
-
- for k, v := range val {
- a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v)
-
- if err != nil {
- return nil, err
- }
-
- attributes[k] = a
- }
-
- return NewFlavorsValueMust(FlavorsValue{}.AttributeTypes(ctx), attributes), nil
-}
-
-func (t FlavorsType) ValueType(ctx context.Context) attr.Value {
- return FlavorsValue{}
-}
-
-var _ basetypes.ObjectValuable = FlavorsValue{}
-
-type FlavorsValue struct {
- Cpu basetypes.Int64Value `tfsdk:"cpu"`
- Description basetypes.StringValue `tfsdk:"description"`
- Id basetypes.StringValue `tfsdk:"id"`
- MaxGb basetypes.Int64Value `tfsdk:"max_gb"`
- Memory basetypes.Int64Value `tfsdk:"memory"`
- MinGb basetypes.Int64Value `tfsdk:"min_gb"`
- NodeType basetypes.StringValue `tfsdk:"node_type"`
- StorageClasses basetypes.ListValue `tfsdk:"storage_classes"`
- state attr.ValueState
-}
-
-func (v FlavorsValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
- attrTypes := make(map[string]tftypes.Type, 8)
-
- var val tftypes.Value
- var err error
-
- attrTypes["cpu"] = basetypes.Int64Type{}.TerraformType(ctx)
- attrTypes["description"] = basetypes.StringType{}.TerraformType(ctx)
- attrTypes["id"] = basetypes.StringType{}.TerraformType(ctx)
- attrTypes["max_gb"] = basetypes.Int64Type{}.TerraformType(ctx)
- attrTypes["memory"] = basetypes.Int64Type{}.TerraformType(ctx)
- attrTypes["min_gb"] = basetypes.Int64Type{}.TerraformType(ctx)
- attrTypes["node_type"] = basetypes.StringType{}.TerraformType(ctx)
- attrTypes["storage_classes"] = basetypes.ListType{
- ElemType: StorageClassesValue{}.Type(ctx),
- }.TerraformType(ctx)
-
- objectType := tftypes.Object{AttributeTypes: attrTypes}
-
- switch v.state {
- case attr.ValueStateKnown:
- vals := make(map[string]tftypes.Value, 8)
-
- val, err = v.Cpu.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["cpu"] = val
-
- val, err = v.Description.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["description"] = val
-
- val, err = v.Id.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["id"] = val
-
- val, err = v.MaxGb.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["max_gb"] = val
-
- val, err = v.Memory.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["memory"] = val
-
- val, err = v.MinGb.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["min_gb"] = val
-
- val, err = v.NodeType.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["node_type"] = val
-
- val, err = v.StorageClasses.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["storage_classes"] = val
-
- if err := tftypes.ValidateValue(objectType, vals); err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- return tftypes.NewValue(objectType, vals), nil
- case attr.ValueStateNull:
- return tftypes.NewValue(objectType, nil), nil
- case attr.ValueStateUnknown:
- return tftypes.NewValue(objectType, tftypes.UnknownValue), nil
- default:
- panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state))
- }
-}
-
-func (v FlavorsValue) IsNull() bool {
- return v.state == attr.ValueStateNull
-}
-
-func (v FlavorsValue) IsUnknown() bool {
- return v.state == attr.ValueStateUnknown
-}
-
-func (v FlavorsValue) String() string {
- return "FlavorsValue"
-}
-
-func (v FlavorsValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- storageClasses := types.ListValueMust(
- StorageClassesType{
- basetypes.ObjectType{
- AttrTypes: StorageClassesValue{}.AttributeTypes(ctx),
- },
- },
- v.StorageClasses.Elements(),
- )
-
- if v.StorageClasses.IsNull() {
- storageClasses = types.ListNull(
- StorageClassesType{
- basetypes.ObjectType{
- AttrTypes: StorageClassesValue{}.AttributeTypes(ctx),
- },
- },
- )
- }
-
- if v.StorageClasses.IsUnknown() {
- storageClasses = types.ListUnknown(
- StorageClassesType{
- basetypes.ObjectType{
- AttrTypes: StorageClassesValue{}.AttributeTypes(ctx),
- },
- },
- )
- }
-
- attributeTypes := map[string]attr.Type{
- "cpu": basetypes.Int64Type{},
- "description": basetypes.StringType{},
- "id": basetypes.StringType{},
- "max_gb": basetypes.Int64Type{},
- "memory": basetypes.Int64Type{},
- "min_gb": basetypes.Int64Type{},
- "node_type": basetypes.StringType{},
- "storage_classes": basetypes.ListType{
- ElemType: StorageClassesValue{}.Type(ctx),
- },
- }
-
- if v.IsNull() {
- return types.ObjectNull(attributeTypes), diags
- }
-
- if v.IsUnknown() {
- return types.ObjectUnknown(attributeTypes), diags
- }
-
- objVal, diags := types.ObjectValue(
- attributeTypes,
- map[string]attr.Value{
- "cpu": v.Cpu,
- "description": v.Description,
- "id": v.Id,
- "max_gb": v.MaxGb,
- "memory": v.Memory,
- "min_gb": v.MinGb,
- "node_type": v.NodeType,
- "storage_classes": storageClasses,
- })
-
- return objVal, diags
-}
-
-func (v FlavorsValue) Equal(o attr.Value) bool {
- other, ok := o.(FlavorsValue)
-
- if !ok {
- return false
- }
-
- if v.state != other.state {
- return false
- }
-
- if v.state != attr.ValueStateKnown {
- return true
- }
-
- if !v.Cpu.Equal(other.Cpu) {
- return false
- }
-
- if !v.Description.Equal(other.Description) {
- return false
- }
-
- if !v.Id.Equal(other.Id) {
- return false
- }
-
- if !v.MaxGb.Equal(other.MaxGb) {
- return false
- }
-
- if !v.Memory.Equal(other.Memory) {
- return false
- }
-
- if !v.MinGb.Equal(other.MinGb) {
- return false
- }
-
- if !v.NodeType.Equal(other.NodeType) {
- return false
- }
-
- if !v.StorageClasses.Equal(other.StorageClasses) {
- return false
- }
-
- return true
-}
-
-func (v FlavorsValue) Type(ctx context.Context) attr.Type {
- return FlavorsType{
- basetypes.ObjectType{
- AttrTypes: v.AttributeTypes(ctx),
- },
- }
-}
-
-func (v FlavorsValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
- return map[string]attr.Type{
- "cpu": basetypes.Int64Type{},
- "description": basetypes.StringType{},
- "id": basetypes.StringType{},
- "max_gb": basetypes.Int64Type{},
- "memory": basetypes.Int64Type{},
- "min_gb": basetypes.Int64Type{},
- "node_type": basetypes.StringType{},
- "storage_classes": basetypes.ListType{
- ElemType: StorageClassesValue{}.Type(ctx),
- },
- }
-}
-
-var _ basetypes.ObjectTypable = StorageClassesType{}
-
-type StorageClassesType struct {
- basetypes.ObjectType
-}
-
-func (t StorageClassesType) Equal(o attr.Type) bool {
- other, ok := o.(StorageClassesType)
-
- if !ok {
- return false
- }
-
- return t.ObjectType.Equal(other.ObjectType)
-}
-
-func (t StorageClassesType) String() string {
- return "StorageClassesType"
-}
-
-func (t StorageClassesType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- attributes := in.Attributes()
-
- classAttribute, ok := attributes["class"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `class is missing from object`)
-
- return nil, diags
- }
-
- classVal, ok := classAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`class expected to be basetypes.StringValue, was: %T`, classAttribute))
- }
-
- maxIoPerSecAttribute, ok := attributes["max_io_per_sec"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `max_io_per_sec is missing from object`)
-
- return nil, diags
- }
-
- maxIoPerSecVal, ok := maxIoPerSecAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`max_io_per_sec expected to be basetypes.Int64Value, was: %T`, maxIoPerSecAttribute))
- }
-
- maxThroughInMbAttribute, ok := attributes["max_through_in_mb"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `max_through_in_mb is missing from object`)
-
- return nil, diags
- }
-
- maxThroughInMbVal, ok := maxThroughInMbAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`max_through_in_mb expected to be basetypes.Int64Value, was: %T`, maxThroughInMbAttribute))
- }
-
- if diags.HasError() {
- return nil, diags
- }
-
- return StorageClassesValue{
- Class: classVal,
- MaxIoPerSec: maxIoPerSecVal,
- MaxThroughInMb: maxThroughInMbVal,
- state: attr.ValueStateKnown,
- }, diags
-}
-
-func NewStorageClassesValueNull() StorageClassesValue {
- return StorageClassesValue{
- state: attr.ValueStateNull,
- }
-}
-
-func NewStorageClassesValueUnknown() StorageClassesValue {
- return StorageClassesValue{
- state: attr.ValueStateUnknown,
- }
-}
-
-func NewStorageClassesValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (StorageClassesValue, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521
- ctx := context.Background()
-
- for name, attributeType := range attributeTypes {
- attribute, ok := attributes[name]
-
- if !ok {
- diags.AddError(
- "Missing StorageClassesValue Attribute Value",
- "While creating a StorageClassesValue value, a missing attribute value was detected. "+
- "A StorageClassesValue must contain values for all attributes, even if null or unknown. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("StorageClassesValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()),
- )
-
- continue
- }
-
- if !attributeType.Equal(attribute.Type(ctx)) {
- diags.AddError(
- "Invalid StorageClassesValue Attribute Type",
- "While creating a StorageClassesValue value, an invalid attribute value was detected. "+
- "A StorageClassesValue must use a matching attribute type for the value. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("StorageClassesValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+
- fmt.Sprintf("StorageClassesValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)),
- )
- }
- }
-
- for name := range attributes {
- _, ok := attributeTypes[name]
-
- if !ok {
- diags.AddError(
- "Extra StorageClassesValue Attribute Value",
- "While creating a StorageClassesValue value, an extra attribute value was detected. "+
- "A StorageClassesValue must not contain values beyond the expected attribute types. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("Extra StorageClassesValue Attribute Name: %s", name),
- )
- }
- }
-
- if diags.HasError() {
- return NewStorageClassesValueUnknown(), diags
- }
-
- classAttribute, ok := attributes["class"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `class is missing from object`)
-
- return NewStorageClassesValueUnknown(), diags
- }
-
- classVal, ok := classAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`class expected to be basetypes.StringValue, was: %T`, classAttribute))
- }
-
- maxIoPerSecAttribute, ok := attributes["max_io_per_sec"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `max_io_per_sec is missing from object`)
-
- return NewStorageClassesValueUnknown(), diags
- }
-
- maxIoPerSecVal, ok := maxIoPerSecAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`max_io_per_sec expected to be basetypes.Int64Value, was: %T`, maxIoPerSecAttribute))
- }
-
- maxThroughInMbAttribute, ok := attributes["max_through_in_mb"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `max_through_in_mb is missing from object`)
-
- return NewStorageClassesValueUnknown(), diags
- }
-
- maxThroughInMbVal, ok := maxThroughInMbAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`max_through_in_mb expected to be basetypes.Int64Value, was: %T`, maxThroughInMbAttribute))
- }
-
- if diags.HasError() {
- return NewStorageClassesValueUnknown(), diags
- }
-
- return StorageClassesValue{
- Class: classVal,
- MaxIoPerSec: maxIoPerSecVal,
- MaxThroughInMb: maxThroughInMbVal,
- state: attr.ValueStateKnown,
- }, diags
-}
-
-func NewStorageClassesValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) StorageClassesValue {
- object, diags := NewStorageClassesValue(attributeTypes, attributes)
-
- if diags.HasError() {
- // This could potentially be added to the diag package.
- diagsStrings := make([]string, 0, len(diags))
-
- for _, diagnostic := range diags {
- diagsStrings = append(diagsStrings, fmt.Sprintf(
- "%s | %s | %s",
- diagnostic.Severity(),
- diagnostic.Summary(),
- diagnostic.Detail()))
- }
-
- panic("NewStorageClassesValueMust received error(s): " + strings.Join(diagsStrings, "\n"))
- }
-
- return object
-}
-
-func (t StorageClassesType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) {
- if in.Type() == nil {
- return NewStorageClassesValueNull(), nil
- }
-
- if !in.Type().Equal(t.TerraformType(ctx)) {
- return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type())
- }
-
- if !in.IsKnown() {
- return NewStorageClassesValueUnknown(), nil
- }
-
- if in.IsNull() {
- return NewStorageClassesValueNull(), nil
- }
-
- attributes := map[string]attr.Value{}
-
- val := map[string]tftypes.Value{}
-
- err := in.As(&val)
-
- if err != nil {
- return nil, err
- }
-
- for k, v := range val {
- a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v)
-
- if err != nil {
- return nil, err
- }
-
- attributes[k] = a
- }
-
- return NewStorageClassesValueMust(StorageClassesValue{}.AttributeTypes(ctx), attributes), nil
-}
-
-func (t StorageClassesType) ValueType(ctx context.Context) attr.Value {
- return StorageClassesValue{}
-}
-
-var _ basetypes.ObjectValuable = StorageClassesValue{}
-
-type StorageClassesValue struct {
- Class basetypes.StringValue `tfsdk:"class"`
- MaxIoPerSec basetypes.Int64Value `tfsdk:"max_io_per_sec"`
- MaxThroughInMb basetypes.Int64Value `tfsdk:"max_through_in_mb"`
- state attr.ValueState
-}
-
-func (v StorageClassesValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
- attrTypes := make(map[string]tftypes.Type, 3)
-
- var val tftypes.Value
- var err error
-
- attrTypes["class"] = basetypes.StringType{}.TerraformType(ctx)
- attrTypes["max_io_per_sec"] = basetypes.Int64Type{}.TerraformType(ctx)
- attrTypes["max_through_in_mb"] = basetypes.Int64Type{}.TerraformType(ctx)
-
- objectType := tftypes.Object{AttributeTypes: attrTypes}
-
- switch v.state {
- case attr.ValueStateKnown:
- vals := make(map[string]tftypes.Value, 3)
-
- val, err = v.Class.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["class"] = val
-
- val, err = v.MaxIoPerSec.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["max_io_per_sec"] = val
-
- val, err = v.MaxThroughInMb.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["max_through_in_mb"] = val
-
- if err := tftypes.ValidateValue(objectType, vals); err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- return tftypes.NewValue(objectType, vals), nil
- case attr.ValueStateNull:
- return tftypes.NewValue(objectType, nil), nil
- case attr.ValueStateUnknown:
- return tftypes.NewValue(objectType, tftypes.UnknownValue), nil
- default:
- panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state))
- }
-}
-
-func (v StorageClassesValue) IsNull() bool {
- return v.state == attr.ValueStateNull
-}
-
-func (v StorageClassesValue) IsUnknown() bool {
- return v.state == attr.ValueStateUnknown
-}
-
-func (v StorageClassesValue) String() string {
- return "StorageClassesValue"
-}
-
-func (v StorageClassesValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- attributeTypes := map[string]attr.Type{
- "class": basetypes.StringType{},
- "max_io_per_sec": basetypes.Int64Type{},
- "max_through_in_mb": basetypes.Int64Type{},
- }
-
- if v.IsNull() {
- return types.ObjectNull(attributeTypes), diags
- }
-
- if v.IsUnknown() {
- return types.ObjectUnknown(attributeTypes), diags
- }
-
- objVal, diags := types.ObjectValue(
- attributeTypes,
- map[string]attr.Value{
- "class": v.Class,
- "max_io_per_sec": v.MaxIoPerSec,
- "max_through_in_mb": v.MaxThroughInMb,
- })
-
- return objVal, diags
-}
-
-func (v StorageClassesValue) Equal(o attr.Value) bool {
- other, ok := o.(StorageClassesValue)
-
- if !ok {
- return false
- }
-
- if v.state != other.state {
- return false
- }
-
- if v.state != attr.ValueStateKnown {
- return true
- }
-
- if !v.Class.Equal(other.Class) {
- return false
- }
-
- if !v.MaxIoPerSec.Equal(other.MaxIoPerSec) {
- return false
- }
-
- if !v.MaxThroughInMb.Equal(other.MaxThroughInMb) {
- return false
- }
-
- return true
-}
-
-func (v StorageClassesValue) Type(ctx context.Context) attr.Type {
- return StorageClassesType{
- basetypes.ObjectType{
- AttrTypes: v.AttributeTypes(ctx),
- },
- }
-}
-
-func (v StorageClassesValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
- return map[string]attr.Type{
- "class": basetypes.StringType{},
- "max_io_per_sec": basetypes.Int64Type{},
- "max_through_in_mb": basetypes.Int64Type{},
- }
-}
-
-var _ basetypes.ObjectTypable = PaginationType{}
-
-type PaginationType struct {
- basetypes.ObjectType
-}
-
-func (t PaginationType) Equal(o attr.Type) bool {
- other, ok := o.(PaginationType)
-
- if !ok {
- return false
- }
-
- return t.ObjectType.Equal(other.ObjectType)
-}
-
-func (t PaginationType) String() string {
- return "PaginationType"
-}
-
-func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- attributes := in.Attributes()
-
- pageAttribute, ok := attributes["page"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `page is missing from object`)
-
- return nil, diags
- }
-
- pageVal, ok := pageAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute))
- }
-
- sizeAttribute, ok := attributes["size"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `size is missing from object`)
-
- return nil, diags
- }
-
- sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
- }
-
- sortAttribute, ok := attributes["sort"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `sort is missing from object`)
-
- return nil, diags
- }
-
- sortVal, ok := sortAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`sort expected to be basetypes.StringValue, was: %T`, sortAttribute))
- }
-
- totalPagesAttribute, ok := attributes["total_pages"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `total_pages is missing from object`)
-
- return nil, diags
- }
-
- totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute))
- }
-
- totalRowsAttribute, ok := attributes["total_rows"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `total_rows is missing from object`)
-
- return nil, diags
- }
-
- totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute))
- }
-
- if diags.HasError() {
- return nil, diags
- }
-
- return PaginationValue{
- Page: pageVal,
- Size: sizeVal,
- Sort: sortVal,
- TotalPages: totalPagesVal,
- TotalRows: totalRowsVal,
- state: attr.ValueStateKnown,
- }, diags
-}
-
-func NewPaginationValueNull() PaginationValue {
- return PaginationValue{
- state: attr.ValueStateNull,
- }
-}
-
-func NewPaginationValueUnknown() PaginationValue {
- return PaginationValue{
- state: attr.ValueStateUnknown,
- }
-}
-
-func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (PaginationValue, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521
- ctx := context.Background()
-
- for name, attributeType := range attributeTypes {
- attribute, ok := attributes[name]
-
- if !ok {
- diags.AddError(
- "Missing PaginationValue Attribute Value",
- "While creating a PaginationValue value, a missing attribute value was detected. "+
- "A PaginationValue must contain values for all attributes, even if null or unknown. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("PaginationValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()),
- )
-
- continue
- }
-
- if !attributeType.Equal(attribute.Type(ctx)) {
- diags.AddError(
- "Invalid PaginationValue Attribute Type",
- "While creating a PaginationValue value, an invalid attribute value was detected. "+
- "A PaginationValue must use a matching attribute type for the value. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("PaginationValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+
- fmt.Sprintf("PaginationValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)),
- )
- }
- }
-
- for name := range attributes {
- _, ok := attributeTypes[name]
-
- if !ok {
- diags.AddError(
- "Extra PaginationValue Attribute Value",
- "While creating a PaginationValue value, an extra attribute value was detected. "+
- "A PaginationValue must not contain values beyond the expected attribute types. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("Extra PaginationValue Attribute Name: %s", name),
- )
- }
- }
-
- if diags.HasError() {
- return NewPaginationValueUnknown(), diags
- }
-
- pageAttribute, ok := attributes["page"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `page is missing from object`)
-
- return NewPaginationValueUnknown(), diags
- }
-
- pageVal, ok := pageAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute))
- }
-
- sizeAttribute, ok := attributes["size"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `size is missing from object`)
-
- return NewPaginationValueUnknown(), diags
- }
-
- sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
- }
-
- sortAttribute, ok := attributes["sort"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `sort is missing from object`)
-
- return NewPaginationValueUnknown(), diags
- }
-
- sortVal, ok := sortAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`sort expected to be basetypes.StringValue, was: %T`, sortAttribute))
- }
-
- totalPagesAttribute, ok := attributes["total_pages"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `total_pages is missing from object`)
-
- return NewPaginationValueUnknown(), diags
- }
-
- totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute))
- }
-
- totalRowsAttribute, ok := attributes["total_rows"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `total_rows is missing from object`)
-
- return NewPaginationValueUnknown(), diags
- }
-
- totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute))
- }
-
- if diags.HasError() {
- return NewPaginationValueUnknown(), diags
- }
-
- return PaginationValue{
- Page: pageVal,
- Size: sizeVal,
- Sort: sortVal,
- TotalPages: totalPagesVal,
- TotalRows: totalRowsVal,
- state: attr.ValueStateKnown,
- }, diags
-}
-
-func NewPaginationValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) PaginationValue {
- object, diags := NewPaginationValue(attributeTypes, attributes)
-
- if diags.HasError() {
- // This could potentially be added to the diag package.
- diagsStrings := make([]string, 0, len(diags))
-
- for _, diagnostic := range diags {
- diagsStrings = append(diagsStrings, fmt.Sprintf(
- "%s | %s | %s",
- diagnostic.Severity(),
- diagnostic.Summary(),
- diagnostic.Detail()))
- }
-
- panic("NewPaginationValueMust received error(s): " + strings.Join(diagsStrings, "\n"))
- }
-
- return object
-}
-
-func (t PaginationType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) {
- if in.Type() == nil {
- return NewPaginationValueNull(), nil
- }
-
- if !in.Type().Equal(t.TerraformType(ctx)) {
- return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type())
- }
-
- if !in.IsKnown() {
- return NewPaginationValueUnknown(), nil
- }
-
- if in.IsNull() {
- return NewPaginationValueNull(), nil
- }
-
- attributes := map[string]attr.Value{}
-
- val := map[string]tftypes.Value{}
-
- err := in.As(&val)
-
- if err != nil {
- return nil, err
- }
-
- for k, v := range val {
- a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v)
-
- if err != nil {
- return nil, err
- }
-
- attributes[k] = a
- }
-
- return NewPaginationValueMust(PaginationValue{}.AttributeTypes(ctx), attributes), nil
-}
-
-func (t PaginationType) ValueType(ctx context.Context) attr.Value {
- return PaginationValue{}
-}
-
-var _ basetypes.ObjectValuable = PaginationValue{}
-
-type PaginationValue struct {
- Page basetypes.Int64Value `tfsdk:"page"`
- Size basetypes.Int64Value `tfsdk:"size"`
- Sort basetypes.StringValue `tfsdk:"sort"`
- TotalPages basetypes.Int64Value `tfsdk:"total_pages"`
- TotalRows basetypes.Int64Value `tfsdk:"total_rows"`
- state attr.ValueState
-}
-
-func (v PaginationValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
- attrTypes := make(map[string]tftypes.Type, 5)
-
- var val tftypes.Value
- var err error
-
- attrTypes["page"] = basetypes.Int64Type{}.TerraformType(ctx)
- attrTypes["size"] = basetypes.Int64Type{}.TerraformType(ctx)
- attrTypes["sort"] = basetypes.StringType{}.TerraformType(ctx)
- attrTypes["total_pages"] = basetypes.Int64Type{}.TerraformType(ctx)
- attrTypes["total_rows"] = basetypes.Int64Type{}.TerraformType(ctx)
-
- objectType := tftypes.Object{AttributeTypes: attrTypes}
-
- switch v.state {
- case attr.ValueStateKnown:
- vals := make(map[string]tftypes.Value, 5)
-
- val, err = v.Page.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["page"] = val
-
- val, err = v.Size.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["size"] = val
-
- val, err = v.Sort.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["sort"] = val
-
- val, err = v.TotalPages.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["total_pages"] = val
-
- val, err = v.TotalRows.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["total_rows"] = val
-
- if err := tftypes.ValidateValue(objectType, vals); err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- return tftypes.NewValue(objectType, vals), nil
- case attr.ValueStateNull:
- return tftypes.NewValue(objectType, nil), nil
- case attr.ValueStateUnknown:
- return tftypes.NewValue(objectType, tftypes.UnknownValue), nil
- default:
- panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state))
- }
-}
-
-func (v PaginationValue) IsNull() bool {
- return v.state == attr.ValueStateNull
-}
-
-func (v PaginationValue) IsUnknown() bool {
- return v.state == attr.ValueStateUnknown
-}
-
-func (v PaginationValue) String() string {
- return "PaginationValue"
-}
-
-func (v PaginationValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- attributeTypes := map[string]attr.Type{
- "page": basetypes.Int64Type{},
- "size": basetypes.Int64Type{},
- "sort": basetypes.StringType{},
- "total_pages": basetypes.Int64Type{},
- "total_rows": basetypes.Int64Type{},
- }
-
- if v.IsNull() {
- return types.ObjectNull(attributeTypes), diags
- }
-
- if v.IsUnknown() {
- return types.ObjectUnknown(attributeTypes), diags
- }
-
- objVal, diags := types.ObjectValue(
- attributeTypes,
- map[string]attr.Value{
- "page": v.Page,
- "size": v.Size,
- "sort": v.Sort,
- "total_pages": v.TotalPages,
- "total_rows": v.TotalRows,
- })
-
- return objVal, diags
-}
-
-func (v PaginationValue) Equal(o attr.Value) bool {
- other, ok := o.(PaginationValue)
-
- if !ok {
- return false
- }
-
- if v.state != other.state {
- return false
- }
-
- if v.state != attr.ValueStateKnown {
- return true
- }
-
- if !v.Page.Equal(other.Page) {
- return false
- }
-
- if !v.Size.Equal(other.Size) {
- return false
- }
-
- if !v.Sort.Equal(other.Sort) {
- return false
- }
-
- if !v.TotalPages.Equal(other.TotalPages) {
- return false
- }
-
- if !v.TotalRows.Equal(other.TotalRows) {
- return false
- }
-
- return true
-}
-
-func (v PaginationValue) Type(ctx context.Context) attr.Type {
- return PaginationType{
- basetypes.ObjectType{
- AttrTypes: v.AttributeTypes(ctx),
- },
- }
-}
-
-func (v PaginationValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
- return map[string]attr.Type{
- "page": basetypes.Int64Type{},
- "size": basetypes.Int64Type{},
- "sort": basetypes.StringType{},
- "total_pages": basetypes.Int64Type{},
- "total_rows": basetypes.Int64Type{},
- }
-}
diff --git a/stackit/internal/services/sqlserverflexalpha/flavor/functions.go b/stackit/internal/services/sqlserverflexalpha/flavor/functions.go
deleted file mode 100644
index 469b7bce..00000000
--- a/stackit/internal/services/sqlserverflexalpha/flavor/functions.go
+++ /dev/null
@@ -1,65 +0,0 @@
-package sqlserverflexalphaFlavor
-
-import (
- "context"
- "fmt"
-
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha"
-)
-
-type flavorsClientReader interface {
- GetFlavorsRequest(
- ctx context.Context,
- projectId, region string,
- ) sqlserverflexalpha.ApiGetFlavorsRequestRequest
-}
-
-func getAllFlavors(ctx context.Context, client flavorsClientReader, projectId, region string) (
- []sqlserverflexalpha.ListFlavors,
- error,
-) {
- getAllFilter := func(_ sqlserverflexalpha.ListFlavors) bool { return true }
- flavorList, err := getFlavorsByFilter(ctx, client, projectId, region, getAllFilter)
- if err != nil {
- return nil, err
- }
- return flavorList, nil
-}
-
-// getFlavorsByFilter is a helper function to retrieve flavors using a filtern function.
-// Hint: The API does not have a GetFlavors endpoint, only ListFlavors
-func getFlavorsByFilter(
- ctx context.Context,
- client flavorsClientReader,
- projectId, region string,
- filter func(db sqlserverflexalpha.ListFlavors) bool,
-) ([]sqlserverflexalpha.ListFlavors, error) {
- if projectId == "" || region == "" {
- return nil, fmt.Errorf("listing sqlserverflexalpha flavors: projectId and region are required")
- }
-
- const pageSize = 25
-
- var result = make([]sqlserverflexalpha.ListFlavors, 0)
-
- for page := int64(1); ; page++ {
- res, err := client.GetFlavorsRequest(ctx, projectId, region).
- Page(page).Size(pageSize).Sort(sqlserverflexalpha.FLAVORSORT_INDEX_ASC).Execute()
- if err != nil {
- return nil, fmt.Errorf("requesting flavors list (page %d): %w", page, err)
- }
-
- // If the API returns no flavors, we have reached the end of the list.
- if res.Flavors == nil || len(*res.Flavors) == 0 {
- break
- }
-
- for _, flavor := range *res.Flavors {
- if filter(flavor) {
- result = append(result, flavor)
- }
- }
- }
-
- return result, nil
-}
diff --git a/stackit/internal/services/sqlserverflexalpha/flavor/functions_test.go b/stackit/internal/services/sqlserverflexalpha/flavor/functions_test.go
deleted file mode 100644
index bed6462c..00000000
--- a/stackit/internal/services/sqlserverflexalpha/flavor/functions_test.go
+++ /dev/null
@@ -1,135 +0,0 @@
-package sqlserverflexalphaFlavor
-
-import (
- "context"
- "testing"
-
- "github.com/stackitcloud/stackit-sdk-go/core/utils"
-
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha"
-)
-
-type mockRequest struct {
- executeFunc func() (*sqlserverflexalpha.GetFlavorsResponse, error)
-}
-
-func (m *mockRequest) Page(_ int64) sqlserverflexalpha.ApiGetFlavorsRequestRequest { return m }
-func (m *mockRequest) Size(_ int64) sqlserverflexalpha.ApiGetFlavorsRequestRequest { return m }
-func (m *mockRequest) Sort(_ sqlserverflexalpha.FlavorSort) sqlserverflexalpha.ApiGetFlavorsRequestRequest {
- return m
-}
-func (m *mockRequest) Execute() (*sqlserverflexalpha.GetFlavorsResponse, error) {
- return m.executeFunc()
-}
-
-type mockFlavorsClient struct {
- executeRequest func() sqlserverflexalpha.ApiGetFlavorsRequestRequest
-}
-
-func (m *mockFlavorsClient) GetFlavorsRequest(_ context.Context, _, _ string) sqlserverflexalpha.ApiGetFlavorsRequestRequest {
- return m.executeRequest()
-}
-
-var mockResp = func(page int64) (*sqlserverflexalpha.GetFlavorsResponse, error) {
- if page == 1 {
- return &sqlserverflexalpha.GetFlavorsResponse{
- Flavors: &[]sqlserverflexalpha.ListFlavors{
- {Id: utils.Ptr("flavor-1"), Description: utils.Ptr("first")},
- {Id: utils.Ptr("flavor-2"), Description: utils.Ptr("second")},
- },
- }, nil
- }
- if page == 2 {
- return &sqlserverflexalpha.GetFlavorsResponse{
- Flavors: &[]sqlserverflexalpha.ListFlavors{
- {Id: utils.Ptr("flavor-3"), Description: utils.Ptr("three")},
- },
- }, nil
- }
-
- return &sqlserverflexalpha.GetFlavorsResponse{
- Flavors: &[]sqlserverflexalpha.ListFlavors{},
- }, nil
-}
-
-func TestGetFlavorsByFilter(t *testing.T) {
- tests := []struct {
- description string
- projectId string
- region string
- mockErr error
- filter func(sqlserverflexalpha.ListFlavors) bool
- wantCount int
- wantErr bool
- }{
- {
- description: "Success - Get all flavors (2 pages)",
- projectId: "pid", region: "reg",
- filter: func(_ sqlserverflexalpha.ListFlavors) bool { return true },
- wantCount: 3,
- wantErr: false,
- },
- {
- description: "Success - Filter flavors by description",
- projectId: "pid", region: "reg",
- filter: func(f sqlserverflexalpha.ListFlavors) bool { return *f.Description == "first" },
- wantCount: 1,
- wantErr: false,
- },
- {
- description: "Error - Missing parameters",
- projectId: "", region: "reg",
- wantErr: true,
- },
- }
-
- for _, tt := range tests {
- t.Run(
- tt.description, func(t *testing.T) {
- var currentPage int64
- client := &mockFlavorsClient{
- executeRequest: func() sqlserverflexalpha.ApiGetFlavorsRequestRequest {
- return &mockRequest{
- executeFunc: func() (*sqlserverflexalpha.GetFlavorsResponse, error) {
- currentPage++
- return mockResp(currentPage)
- },
- }
- },
- }
- actual, err := getFlavorsByFilter(context.Background(), client, tt.projectId, tt.region, tt.filter)
-
- if (err != nil) != tt.wantErr {
- t.Errorf("getFlavorsByFilter() error = %v, wantErr %v", err, tt.wantErr)
- return
- }
-
- if !tt.wantErr && len(actual) != tt.wantCount {
- t.Errorf("getFlavorsByFilter() got %d flavors, want %d", len(actual), tt.wantCount)
- }
- },
- )
- }
-}
-
-func TestGetAllFlavors(t *testing.T) {
- var currentPage int64
- client := &mockFlavorsClient{
- executeRequest: func() sqlserverflexalpha.ApiGetFlavorsRequestRequest {
- return &mockRequest{
- executeFunc: func() (*sqlserverflexalpha.GetFlavorsResponse, error) {
- currentPage++
- return mockResp(currentPage)
- },
- }
- },
- }
-
- res, err := getAllFlavors(context.Background(), client, "pid", "reg")
- if err != nil {
- t.Errorf("getAllFlavors() unexpected error: %v", err)
- }
- if len(res) != 3 {
- t.Errorf("getAllFlavors() expected 3 flavor, got %d", len(res))
- }
-}
diff --git a/stackit/internal/services/sqlserverflexalpha/flavors/datasource.go b/stackit/internal/services/sqlserverflexalpha/flavors/datasource.go
deleted file mode 100644
index 2286e81b..00000000
--- a/stackit/internal/services/sqlserverflexalpha/flavors/datasource.go
+++ /dev/null
@@ -1,156 +0,0 @@
-package sqlserverflexalpha
-
-import (
- "context"
- "fmt"
- "net/http"
-
- "github.com/hashicorp/terraform-plugin-framework/datasource"
- "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/types"
- "github.com/hashicorp/terraform-plugin-log/tflog"
- "github.com/stackitcloud/stackit-sdk-go/core/config"
-
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
-
- sqlserverflexalphaPkg "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha"
-
- sqlserverflexalphaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/flavors/datasources_gen"
-)
-
-var _ datasource.DataSource = (*flavorsDataSource)(nil)
-
-const errorPrefix = "[sqlserverflexalpha - Flavors]"
-
-func NewFlavorsDataSource() datasource.DataSource {
- return &flavorsDataSource{}
-}
-
-type dataSourceModel struct {
- sqlserverflexalphaGen.FlavorsModel
- TerraformId types.String `tfsdk:"id"`
-}
-
-type flavorsDataSource struct {
- client *sqlserverflexalphaPkg.APIClient
- providerData core.ProviderData
-}
-
-func (d *flavorsDataSource) Metadata(
- _ context.Context,
- req datasource.MetadataRequest,
- resp *datasource.MetadataResponse,
-) {
- resp.TypeName = req.ProviderTypeName + "_sqlserverflexalpha_flavors"
-}
-
-func (d *flavorsDataSource) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) {
- resp.Schema = sqlserverflexalphaGen.FlavorsDataSourceSchema(ctx)
- resp.Schema.Attributes["id"] = schema.StringAttribute{
- Computed: true,
- Description: "The terraform internal identifier.",
- MarkdownDescription: "The terraform internal identifier.",
- }
-}
-
-// Configure adds the provider configured client to the data source.
-func (d *flavorsDataSource) Configure(
- ctx context.Context,
- req datasource.ConfigureRequest,
- resp *datasource.ConfigureResponse,
-) {
- var ok bool
- d.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics)
- if !ok {
- return
- }
-
- apiClientConfigOptions := []config.ConfigurationOption{
- config.WithCustomAuth(d.providerData.RoundTripper),
- utils.UserAgentConfigOption(d.providerData.Version),
- }
- if d.providerData.SQLServerFlexCustomEndpoint != "" {
- apiClientConfigOptions = append(
- apiClientConfigOptions,
- config.WithEndpoint(d.providerData.SQLServerFlexCustomEndpoint),
- )
- } else {
- apiClientConfigOptions = append(
- apiClientConfigOptions,
- config.WithRegion(d.providerData.GetRegion()),
- )
- }
- apiClient, err := sqlserverflexalphaPkg.NewAPIClient(apiClientConfigOptions...)
- if err != nil {
- resp.Diagnostics.AddError(
- "Error configuring API client",
- fmt.Sprintf(
- "Configuring client: %v. This is an error related to the provider configuration, not to the resource configuration",
- err,
- ),
- )
- return
- }
- d.client = apiClient
- tflog.Info(ctx, fmt.Sprintf("%s client configured", errorPrefix))
-}
-
-func (d *flavorsDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
- var data dataSourceModel
-
- // Read Terraform configuration data into the model
- resp.Diagnostics.Append(req.Config.Get(ctx, &data)...)
-
- if resp.Diagnostics.HasError() {
- return
- }
-
- ctx = core.InitProviderContext(ctx)
-
- projectId := data.ProjectId.ValueString()
- region := d.providerData.GetRegionWithOverride(data.Region)
- // TODO: implement right identifier for flavors
- flavorsId := data.Flavors
-
- ctx = tflog.SetField(ctx, "project_id", projectId)
- ctx = tflog.SetField(ctx, "region", region)
-
- // TODO: implement needed fields
- ctx = tflog.SetField(ctx, "flavors_id", flavorsId)
-
- // TODO: refactor to correct implementation
- _, err := d.client.GetFlavorsRequest(ctx, projectId, region).Execute()
- if err != nil {
- utils.LogError(
- ctx,
- &resp.Diagnostics,
- err,
- "Reading flavors",
- fmt.Sprintf("flavors with ID %q does not exist in project %q.", flavorsId, projectId),
- map[int]string{
- http.StatusForbidden: fmt.Sprintf("Project with ID %q not found or forbidden access", projectId),
- },
- )
- resp.State.RemoveResource(ctx)
- return
- }
-
- ctx = core.LogResponse(ctx)
-
- // TODO: refactor to correct implementation of internal tf id
- data.TerraformId = utils.BuildInternalTerraformId(projectId, region)
-
- // TODO: fill remaining fields
- // data.Flavors = types.Sometype(apiResponse.GetFlavors())
- // data.Page = types.Sometype(apiResponse.GetPage())
- // data.Pagination = types.Sometype(apiResponse.GetPagination())
- // data.ProjectId = types.Sometype(apiResponse.GetProjectId())
- // data.Region = types.Sometype(apiResponse.GetRegion())
- // data.Size = types.Sometype(apiResponse.GetSize())
- // data.Sort = types.Sometype(apiResponse.GetSort())// Save data into Terraform state
- resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
-
- tflog.Info(ctx, fmt.Sprintf("%s read successful", errorPrefix))
-}
diff --git a/stackit/internal/services/sqlserverflexalpha/instance/datasource.go b/stackit/internal/services/sqlserverflexalpha/instance/datasource.go
deleted file mode 100644
index 123b1fe8..00000000
--- a/stackit/internal/services/sqlserverflexalpha/instance/datasource.go
+++ /dev/null
@@ -1,146 +0,0 @@
-package sqlserverflexalpha
-
-import (
- "context"
- "fmt"
- "net/http"
-
- "github.com/hashicorp/terraform-plugin-framework/datasource"
- "github.com/hashicorp/terraform-plugin-framework/types"
- "github.com/hashicorp/terraform-plugin-log/tflog"
- "github.com/stackitcloud/stackit-sdk-go/core/config"
-
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
-
- sqlserverflexalphaPkg "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha"
-
- sqlserverflexalphaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/instance/datasources_gen"
-)
-
-var _ datasource.DataSource = (*instanceDataSource)(nil)
-
-const errorPrefix = "[sqlserverflexalpha - Instance]"
-
-func NewInstanceDataSource() datasource.DataSource {
- return &instanceDataSource{}
-}
-
-// dataSourceModel maps the data source schema data.
-type dataSourceModel struct {
- sqlserverflexalphaGen.InstanceModel
- TerraformID types.String `tfsdk:"id"`
-}
-
-type instanceDataSource struct {
- client *sqlserverflexalphaPkg.APIClient
- providerData core.ProviderData
-}
-
-func (d *instanceDataSource) Metadata(
- _ context.Context,
- req datasource.MetadataRequest,
- resp *datasource.MetadataResponse,
-) {
- resp.TypeName = req.ProviderTypeName + "_sqlserverflexalpha_instance"
-}
-
-func (d *instanceDataSource) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) {
- resp.Schema = sqlserverflexalphaGen.InstanceDataSourceSchema(ctx)
-}
-
-// Configure adds the provider configured client to the data source.
-func (d *instanceDataSource) Configure(
- ctx context.Context,
- req datasource.ConfigureRequest,
- resp *datasource.ConfigureResponse,
-) {
- var ok bool
- d.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics)
- if !ok {
- return
- }
-
- apiClientConfigOptions := []config.ConfigurationOption{
- config.WithCustomAuth(d.providerData.RoundTripper),
- utils.UserAgentConfigOption(d.providerData.Version),
- }
- if d.providerData.SQLServerFlexCustomEndpoint != "" {
- apiClientConfigOptions = append(
- apiClientConfigOptions,
- config.WithEndpoint(d.providerData.SQLServerFlexCustomEndpoint),
- )
- } else {
- apiClientConfigOptions = append(
- apiClientConfigOptions,
- config.WithRegion(d.providerData.GetRegion()),
- )
- }
- apiClient, err := sqlserverflexalphaPkg.NewAPIClient(apiClientConfigOptions...)
- if err != nil {
- resp.Diagnostics.AddError(
- "Error configuring API client",
- fmt.Sprintf(
- "Configuring client: %v. This is an error related to the provider configuration, not to the resource configuration",
- err,
- ),
- )
- return
- }
- d.client = apiClient
- tflog.Info(ctx, fmt.Sprintf("%s client configured", errorPrefix))
-}
-
-func (d *instanceDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
- var data dataSourceModel
-
- // Read Terraform configuration data into the model
- resp.Diagnostics.Append(req.Config.Get(ctx, &data)...)
-
- if resp.Diagnostics.HasError() {
- return
- }
-
- ctx = core.InitProviderContext(ctx)
-
- projectId := data.ProjectId.ValueString()
- region := d.providerData.GetRegionWithOverride(data.Region)
- instanceId := data.InstanceId.ValueString()
-
- ctx = tflog.SetField(ctx, "project_id", projectId)
- ctx = tflog.SetField(ctx, "region", region)
- ctx = tflog.SetField(ctx, "instance_id", instanceId)
-
- instanceResp, err := d.client.GetInstanceRequest(ctx, projectId, region, instanceId).Execute()
- if err != nil {
- utils.LogError(
- ctx,
- &resp.Diagnostics,
- err,
- "Reading instance",
- fmt.Sprintf("instance with ID %q does not exist in project %q.", instanceId, projectId),
- map[int]string{
- http.StatusForbidden: fmt.Sprintf("Project with ID %q not found or forbidden access", projectId),
- },
- )
- resp.State.RemoveResource(ctx)
- return
- }
-
- ctx = core.LogResponse(ctx)
-
- err = mapDataResponseToModel(ctx, instanceResp, &data, resp.Diagnostics)
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- fmt.Sprintf("%s Read", errorPrefix),
- fmt.Sprintf("Processing API payload: %v", err),
- )
- return
- }
-
- // Save data into Terraform state
- resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
-}
diff --git a/stackit/internal/services/sqlserverflexalpha/instance/functions.go b/stackit/internal/services/sqlserverflexalpha/instance/functions.go
deleted file mode 100644
index a8567903..00000000
--- a/stackit/internal/services/sqlserverflexalpha/instance/functions.go
+++ /dev/null
@@ -1,283 +0,0 @@
-package sqlserverflexalpha
-
-import (
- "context"
- "errors"
- "fmt"
- "math"
-
- "github.com/hashicorp/terraform-plugin-framework/attr"
- "github.com/hashicorp/terraform-plugin-framework/diag"
- "github.com/hashicorp/terraform-plugin-framework/resource"
- "github.com/hashicorp/terraform-plugin-framework/types"
-
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
- sqlserverflexalphaDataGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/instance/datasources_gen"
- sqlserverflexalphaResGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/instance/resources_gen"
-)
-
-func mapResponseToModel(
- ctx context.Context,
- resp *sqlserverflexalpha.GetInstanceResponse,
- m *sqlserverflexalphaResGen.InstanceModel,
- tfDiags diag.Diagnostics,
-) error {
- m.BackupSchedule = types.StringValue(resp.GetBackupSchedule())
- m.Edition = types.StringValue(string(resp.GetEdition()))
- m.Encryption = handleEncryption(m, resp)
- m.FlavorId = types.StringValue(resp.GetFlavorId())
- m.Id = types.StringValue(resp.GetId())
- m.InstanceId = types.StringValue(resp.GetId())
- m.IsDeletable = types.BoolValue(resp.GetIsDeletable())
- m.Name = types.StringValue(resp.GetName())
- netAcl, diags := types.ListValueFrom(ctx, types.StringType, resp.Network.GetAcl())
- tfDiags.Append(diags...)
- if diags.HasError() {
- return fmt.Errorf(
- "error converting network acl response value",
- )
- }
- net, diags := sqlserverflexalphaResGen.NewNetworkValue(
- sqlserverflexalphaResGen.NetworkValue{}.AttributeTypes(ctx),
- map[string]attr.Value{
- "access_scope": types.StringValue(string(resp.Network.GetAccessScope())),
- "acl": netAcl,
- "instance_address": types.StringValue(resp.Network.GetInstanceAddress()),
- "router_address": types.StringValue(resp.Network.GetRouterAddress()),
- },
- )
- tfDiags.Append(diags...)
- if diags.HasError() {
- return errors.New("error converting network response value")
- }
- m.Network = net
- m.Replicas = types.Int64Value(int64(resp.GetReplicas()))
- m.RetentionDays = types.Int64Value(resp.GetRetentionDays())
- m.Status = types.StringValue(string(resp.GetStatus()))
-
- stor, diags := sqlserverflexalphaResGen.NewStorageValue(
- sqlserverflexalphaResGen.StorageValue{}.AttributeTypes(ctx),
- map[string]attr.Value{
- "class": types.StringValue(resp.Storage.GetClass()),
- "size": types.Int64Value(resp.Storage.GetSize()),
- },
- )
- tfDiags.Append(diags...)
- if diags.HasError() {
- return fmt.Errorf("error converting storage response value")
- }
- m.Storage = stor
-
- m.Version = types.StringValue(string(resp.GetVersion()))
- return nil
-}
-
-func mapDataResponseToModel(
- ctx context.Context,
- resp *sqlserverflexalpha.GetInstanceResponse,
- m *dataSourceModel,
- tfDiags diag.Diagnostics,
-) error {
- m.BackupSchedule = types.StringValue(resp.GetBackupSchedule())
- m.Edition = types.StringValue(string(resp.GetEdition()))
- m.Encryption = handleDSEncryption(m, resp)
- m.FlavorId = types.StringValue(resp.GetFlavorId())
- m.Id = types.StringValue(resp.GetId())
- m.InstanceId = types.StringValue(resp.GetId())
- m.IsDeletable = types.BoolValue(resp.GetIsDeletable())
- m.Name = types.StringValue(resp.GetName())
- netAcl, diags := types.ListValueFrom(ctx, types.StringType, resp.Network.GetAcl())
- tfDiags.Append(diags...)
- if diags.HasError() {
- return fmt.Errorf(
- "error converting network acl response value",
- )
- }
- net, diags := sqlserverflexalphaDataGen.NewNetworkValue(
- sqlserverflexalphaDataGen.NetworkValue{}.AttributeTypes(ctx),
- map[string]attr.Value{
- "access_scope": types.StringValue(string(resp.Network.GetAccessScope())),
- "acl": netAcl,
- "instance_address": types.StringValue(resp.Network.GetInstanceAddress()),
- "router_address": types.StringValue(resp.Network.GetRouterAddress()),
- },
- )
- tfDiags.Append(diags...)
- if diags.HasError() {
- return errors.New("error converting network response value")
- }
- m.Network = net
- m.Replicas = types.Int64Value(int64(resp.GetReplicas()))
- m.RetentionDays = types.Int64Value(resp.GetRetentionDays())
- m.Status = types.StringValue(string(resp.GetStatus()))
-
- stor, diags := sqlserverflexalphaDataGen.NewStorageValue(
- sqlserverflexalphaDataGen.StorageValue{}.AttributeTypes(ctx),
- map[string]attr.Value{
- "class": types.StringValue(resp.Storage.GetClass()),
- "size": types.Int64Value(resp.Storage.GetSize()),
- },
- )
- tfDiags.Append(diags...)
- if diags.HasError() {
- return fmt.Errorf("error converting storage response value")
- }
- m.Storage = stor
-
- m.Version = types.StringValue(string(resp.GetVersion()))
- return nil
-}
-
-func handleEncryption(
- m *sqlserverflexalphaResGen.InstanceModel,
- resp *sqlserverflexalpha.GetInstanceResponse,
-) sqlserverflexalphaResGen.EncryptionValue {
- if !resp.HasEncryption() ||
- resp.Encryption == nil ||
- resp.Encryption.KekKeyId == nil ||
- resp.Encryption.KekKeyRingId == nil ||
- resp.Encryption.KekKeyVersion == nil ||
- resp.Encryption.ServiceAccount == nil {
- if m.Encryption.IsNull() || m.Encryption.IsUnknown() {
- return sqlserverflexalphaResGen.NewEncryptionValueNull()
- }
- return m.Encryption
- }
-
- enc := sqlserverflexalphaResGen.NewEncryptionValueNull()
- if kVal, ok := resp.Encryption.GetKekKeyIdOk(); ok {
- enc.KekKeyId = types.StringValue(kVal)
- }
- if kkVal, ok := resp.Encryption.GetKekKeyRingIdOk(); ok {
- enc.KekKeyRingId = types.StringValue(kkVal)
- }
- if kkvVal, ok := resp.Encryption.GetKekKeyVersionOk(); ok {
- enc.KekKeyVersion = types.StringValue(kkvVal)
- }
- if sa, ok := resp.Encryption.GetServiceAccountOk(); ok {
- enc.ServiceAccount = types.StringValue(sa)
- }
- return enc
-}
-
-func handleDSEncryption(
- m *dataSourceModel,
- resp *sqlserverflexalpha.GetInstanceResponse,
-) sqlserverflexalphaDataGen.EncryptionValue {
- if !resp.HasEncryption() ||
- resp.Encryption == nil ||
- resp.Encryption.KekKeyId == nil ||
- resp.Encryption.KekKeyRingId == nil ||
- resp.Encryption.KekKeyVersion == nil ||
- resp.Encryption.ServiceAccount == nil {
- if m.Encryption.IsNull() || m.Encryption.IsUnknown() {
- return sqlserverflexalphaDataGen.NewEncryptionValueNull()
- }
- return m.Encryption
- }
-
- enc := sqlserverflexalphaDataGen.NewEncryptionValueNull()
- if kVal, ok := resp.Encryption.GetKekKeyIdOk(); ok {
- enc.KekKeyId = types.StringValue(kVal)
- }
- if kkVal, ok := resp.Encryption.GetKekKeyRingIdOk(); ok {
- enc.KekKeyRingId = types.StringValue(kkVal)
- }
- if kkvVal, ok := resp.Encryption.GetKekKeyVersionOk(); ok {
- enc.KekKeyVersion = types.StringValue(kkvVal)
- }
- if sa, ok := resp.Encryption.GetServiceAccountOk(); ok {
- enc.ServiceAccount = types.StringValue(sa)
- }
- return enc
-}
-
-func toCreatePayload(
- ctx context.Context,
- model *sqlserverflexalphaResGen.InstanceModel,
-) (*sqlserverflexalpha.CreateInstanceRequestPayload, error) {
- if model == nil {
- return nil, fmt.Errorf("nil model")
- }
-
- storagePayload := &sqlserverflexalpha.CreateInstanceRequestPayloadGetStorageArgType{}
- if !model.Storage.IsNull() && !model.Storage.IsUnknown() {
- storagePayload.Class = model.Storage.Class.ValueStringPointer()
- storagePayload.Size = model.Storage.Size.ValueInt64Pointer()
- }
-
- var encryptionPayload *sqlserverflexalpha.CreateInstanceRequestPayloadGetEncryptionArgType = nil
- if !model.Encryption.IsNull() && !model.Encryption.IsUnknown() &&
- !model.Encryption.KekKeyId.IsNull() && model.Encryption.KekKeyId.IsUnknown() && model.Encryption.KekKeyId.ValueString() != "" &&
- !model.Encryption.KekKeyRingId.IsNull() && !model.Encryption.KekKeyRingId.IsUnknown() && model.Encryption.KekKeyRingId.ValueString() != "" &&
- !model.Encryption.KekKeyVersion.IsNull() && !model.Encryption.KekKeyVersion.IsUnknown() && model.Encryption.KekKeyVersion.ValueString() != "" &&
- !model.Encryption.ServiceAccount.IsNull() && !model.Encryption.ServiceAccount.IsUnknown() && model.Encryption.ServiceAccount.ValueString() != "" {
- encryptionPayload = &sqlserverflexalpha.CreateInstanceRequestPayloadGetEncryptionArgType{
- KekKeyId: model.Encryption.KekKeyId.ValueStringPointer(),
- KekKeyRingId: model.Encryption.KekKeyVersion.ValueStringPointer(),
- KekKeyVersion: model.Encryption.KekKeyRingId.ValueStringPointer(),
- ServiceAccount: model.Encryption.ServiceAccount.ValueStringPointer(),
- }
- }
-
- networkPayload := &sqlserverflexalpha.CreateInstanceRequestPayloadGetNetworkArgType{}
- if !model.Network.IsNull() && !model.Network.IsUnknown() {
- networkPayload.AccessScope = sqlserverflexalpha.CreateInstanceRequestPayloadNetworkGetAccessScopeAttributeType(
- model.Network.AccessScope.ValueStringPointer(),
- )
-
- var resList []string
- diags := model.Network.Acl.ElementsAs(ctx, &resList, false)
- if diags.HasError() {
- return nil, fmt.Errorf("error converting network acl list")
- }
- networkPayload.Acl = &resList
- }
-
- return &sqlserverflexalpha.CreateInstanceRequestPayload{
- BackupSchedule: conversion.StringValueToPointer(model.BackupSchedule),
- Encryption: encryptionPayload,
- FlavorId: conversion.StringValueToPointer(model.FlavorId),
- Name: conversion.StringValueToPointer(model.Name),
- Network: networkPayload,
- RetentionDays: conversion.Int64ValueToPointer(model.RetentionDays),
- Storage: storagePayload,
- Version: sqlserverflexalpha.CreateInstanceRequestPayloadGetVersionAttributeType(
- conversion.StringValueToPointer(model.Version),
- ),
- }, nil
-}
-
-func toUpdatePayload(
- ctx context.Context,
- m *sqlserverflexalphaResGen.InstanceModel,
- resp *resource.UpdateResponse,
-) (*sqlserverflexalpha.UpdateInstanceRequestPayload, error) {
- if m == nil {
- return nil, fmt.Errorf("nil model")
- }
- if m.Replicas.ValueInt64() > math.MaxUint32 {
- return nil, fmt.Errorf("replicas value is too big for uint32")
- }
- replVal := sqlserverflexalpha.Replicas(uint32(m.Replicas.ValueInt64())) // nolint:gosec // check is performed above
-
- var netAcl []string
- diags := m.Network.Acl.ElementsAs(ctx, &netAcl, false)
- resp.Diagnostics.Append(diags...)
- if diags.HasError() {
- return nil, fmt.Errorf("error converting model network acl value")
- }
- return &sqlserverflexalpha.UpdateInstanceRequestPayload{
- BackupSchedule: m.BackupSchedule.ValueStringPointer(),
- FlavorId: m.FlavorId.ValueStringPointer(),
- Name: m.Name.ValueStringPointer(),
- Network: sqlserverflexalpha.NewUpdateInstanceRequestPayloadNetwork(netAcl),
- Replicas: &replVal,
- RetentionDays: m.RetentionDays.ValueInt64Pointer(),
- Storage: &sqlserverflexalpha.StorageUpdate{Size: m.Storage.Size.ValueInt64Pointer()},
- Version: sqlserverflexalpha.UpdateInstanceRequestPayloadGetVersionAttributeType(
- m.Version.ValueStringPointer(),
- ),
- }, nil
-}
diff --git a/stackit/internal/services/sqlserverflexalpha/instance/planModifiers.yaml b/stackit/internal/services/sqlserverflexalpha/instance/planModifiers.yaml
deleted file mode 100644
index 71d4cbe4..00000000
--- a/stackit/internal/services/sqlserverflexalpha/instance/planModifiers.yaml
+++ /dev/null
@@ -1,124 +0,0 @@
-fields:
- - name: 'id'
- modifiers:
- - 'UseStateForUnknown'
-
- - name: 'instance_id'
- validators:
- - validate.NoSeparator
- - validate.UUID
- modifiers:
- - 'UseStateForUnknown'
-
- - name: 'project_id'
- validators:
- - validate.NoSeparator
- - validate.UUID
- modifiers:
- - 'UseStateForUnknown'
- - 'RequiresReplace'
-
- - name: 'name'
- modifiers:
- - 'UseStateForUnknown'
-
- - name: 'backup_schedule'
- modifiers:
- - 'UseStateForUnknown'
-
- - name: 'encryption.kek_key_id'
- validators:
- - validate.NoSeparator
- modifiers:
- - 'UseStateForUnknown'
- - 'RequiresReplace'
-
- - name: 'encryption.kek_key_version'
- validators:
- - validate.NoSeparator
- modifiers:
- - 'UseStateForUnknown'
- - 'RequiresReplace'
-
- - name: 'encryption.kek_key_ring_id'
- validators:
- - validate.NoSeparator
- modifiers:
- - 'UseStateForUnknown'
- - 'RequiresReplace'
-
- - name: 'encryption.service_account'
- validators:
- - validate.NoSeparator
- modifiers:
- - 'UseStateForUnknown'
- - 'RequiresReplace'
-
- - name: 'network.access_scope'
- validators:
- - validate.NoSeparator
- modifiers:
- - 'UseStateForUnknown'
- - 'RequiresReplace'
-
- - name: 'network.acl'
- modifiers:
- - 'UseStateForUnknown'
-
- - name: 'network.instance_address'
- modifiers:
- - 'UseStateForUnknown'
-
- - name: 'network.router_address'
- modifiers:
- - 'UseStateForUnknown'
-
- - name: 'status'
- modifiers:
- - 'UseStateForUnknown'
-
- - name: 'region'
- modifiers:
- - 'UseStateForUnknown'
- - 'RequiresReplace'
-
- - name: 'retention_days'
- modifiers:
- - 'UseStateForUnknown'
-
- - name: 'edition'
- modifiers:
- - 'UseStateForUnknown'
- - 'RequiresReplace'
-
- - name: 'version'
- modifiers:
- - 'UseStateForUnknown'
- - 'RequiresReplace'
-
- - name: 'replicas'
- modifiers:
- - 'UseStateForUnknown'
- - 'RequiresReplace'
-
- - name: 'storage'
- modifiers:
- - 'UseStateForUnknown'
-
- - name: 'storage.class'
- modifiers:
- - 'UseStateForUnknown'
- - 'RequiresReplace'
-
- - name: 'storage.size'
- modifiers:
- - 'UseStateForUnknown'
-
- - name: 'flavor_id'
- modifiers:
- - 'UseStateForUnknown'
- - 'RequiresReplace'
-
- - name: 'is_deletable'
- modifiers:
- - 'UseStateForUnknown'
diff --git a/stackit/internal/services/sqlserverflexalpha/instance/resource.go b/stackit/internal/services/sqlserverflexalpha/instance/resource.go
deleted file mode 100644
index 3b1f4fd3..00000000
--- a/stackit/internal/services/sqlserverflexalpha/instance/resource.go
+++ /dev/null
@@ -1,554 +0,0 @@
-package sqlserverflexalpha
-
-import (
- "context"
- _ "embed"
- "fmt"
- "net/http"
- "strings"
- "time"
-
- "github.com/hashicorp/terraform-plugin-framework/path"
- "github.com/hashicorp/terraform-plugin-framework/resource"
- "github.com/hashicorp/terraform-plugin-framework/resource/identityschema"
- "github.com/hashicorp/terraform-plugin-framework/types"
- "github.com/hashicorp/terraform-plugin-log/tflog"
- "github.com/stackitcloud/stackit-sdk-go/core/config"
- "github.com/stackitcloud/stackit-sdk-go/core/oapierror"
-
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
- wait "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/wait/sqlserverflexalpha"
-
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha"
-
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
-
- sqlserverflexalphaResGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/instance/resources_gen"
-)
-
-var (
- _ resource.Resource = &instanceResource{}
- _ resource.ResourceWithConfigure = &instanceResource{}
- _ resource.ResourceWithImportState = &instanceResource{}
- _ resource.ResourceWithModifyPlan = &instanceResource{}
- _ resource.ResourceWithIdentity = &instanceResource{}
-)
-
-func NewInstanceResource() resource.Resource {
- return &instanceResource{}
-}
-
-type instanceResource struct {
- client *sqlserverflexalpha.APIClient
- providerData core.ProviderData
-}
-
-// resourceModel describes the resource data model.
-type resourceModel = sqlserverflexalphaResGen.InstanceModel
-
-type InstanceResourceIdentityModel struct {
- ProjectID types.String `tfsdk:"project_id"`
- Region types.String `tfsdk:"region"`
- InstanceID types.String `tfsdk:"instance_id"`
-}
-
-func (r *instanceResource) Metadata(
- _ context.Context,
- req resource.MetadataRequest,
- resp *resource.MetadataResponse,
-) {
- resp.TypeName = req.ProviderTypeName + "_sqlserverflexalpha_instance"
-}
-
-//go:embed planModifiers.yaml
-var modifiersFileByte []byte
-
-func (r *instanceResource) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) {
- s := sqlserverflexalphaResGen.InstanceResourceSchema(ctx)
-
- fields, err := utils.ReadModifiersConfig(modifiersFileByte)
- if err != nil {
- resp.Diagnostics.AddError("error during read modifiers config file", err.Error())
- return
- }
-
- err = utils.AddPlanModifiersToResourceSchema(fields, &s)
- if err != nil {
- resp.Diagnostics.AddError("error adding plan modifiers", err.Error())
- return
- }
- resp.Schema = s
-}
-
-func (r *instanceResource) IdentitySchema(
- _ context.Context,
- _ resource.IdentitySchemaRequest,
- resp *resource.IdentitySchemaResponse,
-) {
- resp.IdentitySchema = identityschema.Schema{
- Attributes: map[string]identityschema.Attribute{
- "project_id": identityschema.StringAttribute{
- RequiredForImport: true, // must be set during import by the practitioner
- },
- "region": identityschema.StringAttribute{
- RequiredForImport: true, // can be defaulted by the provider configuration
- },
- "instance_id": identityschema.StringAttribute{
- RequiredForImport: true, // can be defaulted by the provider configuration
- },
- },
- }
-}
-
-// Configure adds the provider configured client to the resource.
-func (r *instanceResource) Configure(
- ctx context.Context,
- req resource.ConfigureRequest,
- resp *resource.ConfigureResponse,
-) {
- var ok bool
- r.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics)
- if !ok {
- return
- }
-
- apiClientConfigOptions := []config.ConfigurationOption{
- config.WithCustomAuth(r.providerData.RoundTripper),
- utils.UserAgentConfigOption(r.providerData.Version),
- }
- if r.providerData.SQLServerFlexCustomEndpoint != "" {
- apiClientConfigOptions = append(
- apiClientConfigOptions,
- config.WithEndpoint(r.providerData.SQLServerFlexCustomEndpoint),
- )
- } else {
- apiClientConfigOptions = append(apiClientConfigOptions, config.WithRegion(r.providerData.GetRegion()))
- }
- apiClient, err := sqlserverflexalpha.NewAPIClient(apiClientConfigOptions...)
- if err != nil {
- resp.Diagnostics.AddError(
- "Error configuring API client",
- fmt.Sprintf(
- "Configuring client: %v. This is an error related to the provider configuration, not to the resource configuration",
- err,
- ),
- )
- return
- }
- r.client = apiClient
- tflog.Info(ctx, "sqlserverflexalpha.Instance client configured")
-}
-
-// ModifyPlan implements resource.ResourceWithModifyPlan.
-// Use the modifier to set the effective region in the current plan.
-func (r *instanceResource) ModifyPlan(
- ctx context.Context,
- req resource.ModifyPlanRequest,
- resp *resource.ModifyPlanResponse,
-) { // nolint:gocritic // function signature required by Terraform
- // skip initial empty configuration to avoid follow-up errors
- if req.Config.Raw.IsNull() {
- return
- }
- var configModel resourceModel
- resp.Diagnostics.Append(req.Config.Get(ctx, &configModel)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- if req.Plan.Raw.IsNull() {
- return
- }
- var planModel resourceModel
- resp.Diagnostics.Append(req.Plan.Get(ctx, &planModel)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- utils.AdaptRegion(ctx, configModel.Region, &planModel.Region, r.providerData.GetRegion(), resp)
- if resp.Diagnostics.HasError() {
- return
- }
-
- resp.Diagnostics.Append(resp.Plan.Set(ctx, planModel)...)
- if resp.Diagnostics.HasError() {
- return
- }
-}
-
-func (r *instanceResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) {
- var data resourceModel
- crateErr := "[SQL Server Flex BETA - Create] error"
-
- // Read Terraform plan data into the model
- resp.Diagnostics.Append(req.Plan.Get(ctx, &data)...)
-
- if resp.Diagnostics.HasError() {
- return
- }
-
- ctx = core.InitProviderContext(ctx)
-
- projectId := data.ProjectId.ValueString()
- region := data.Region.ValueString()
- ctx = tflog.SetField(ctx, "project_id", projectId)
- ctx = tflog.SetField(ctx, "region", region)
-
- // Generate API request body from model
- payload, err := toCreatePayload(ctx, &data)
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- crateErr,
- fmt.Sprintf("Creating API payload: %v", err),
- )
- return
- }
- // Create new Instance
- createResp, err := r.client.CreateInstanceRequest(
- ctx,
- projectId,
- region,
- ).CreateInstanceRequestPayload(*payload).Execute()
- if err != nil {
- core.LogAndAddError(ctx, &resp.Diagnostics, crateErr, fmt.Sprintf("Calling API: %v", err))
- return
- }
-
- ctx = core.LogResponse(ctx)
-
- InstanceId := *createResp.Id
-
- // Example data value setting
- data.InstanceId = types.StringValue("id-from-response")
-
- identity := InstanceResourceIdentityModel{
- ProjectID: types.StringValue(projectId),
- Region: types.StringValue(region),
- InstanceID: types.StringValue(InstanceId),
- }
- resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- waitResp, err := wait.CreateInstanceWaitHandler(
- ctx,
- r.client,
- projectId,
- InstanceId,
- region,
- ).SetSleepBeforeWait(
- 10 * time.Second,
- ).SetTimeout(
- 90 * time.Minute,
- ).WaitWithContext(ctx)
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- crateErr,
- fmt.Sprintf("Instance creation waiting: %v", err),
- )
- return
- }
-
- if waitResp.Id == nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- crateErr,
- "Instance creation waiting: returned id is nil",
- )
- return
- }
-
- // Map response body to schema
- err = mapResponseToModel(ctx, waitResp, &data, resp.Diagnostics)
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- crateErr,
- fmt.Sprintf("processing API payload: %v", err),
- )
- return
- }
-
- // Save data into Terraform state
- resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
-
- tflog.Info(ctx, "sqlserverflexalpha.Instance created")
-}
-
-func (r *instanceResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) {
- var data resourceModel
-
- // Read Terraform prior state data into the model
- resp.Diagnostics.Append(req.State.Get(ctx, &data)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- // Read identity data
- var identityData InstanceResourceIdentityModel
- resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- ctx = core.InitProviderContext(ctx)
-
- projectId := data.ProjectId.ValueString()
- region := data.Region.ValueString()
- ctx = tflog.SetField(ctx, "project_id", projectId)
- ctx = tflog.SetField(ctx, "region", region)
-
- instanceId := data.InstanceId.ValueString()
- ctx = tflog.SetField(ctx, "instance_id", instanceId)
-
- instanceResp, err := r.client.GetInstanceRequest(ctx, projectId, region, instanceId).Execute()
- if err != nil {
- oapiErr, ok := err.(*oapierror.GenericOpenAPIError) //nolint:errorlint //complaining that error.As should be used to catch wrapped errors, but this error should not be wrapped
- if ok && oapiErr.StatusCode == http.StatusNotFound {
- resp.State.RemoveResource(ctx)
- return
- }
- core.LogAndAddError(ctx, &resp.Diagnostics, "Error reading instance", err.Error())
- return
- }
-
- ctx = core.LogResponse(ctx)
-
- // Map response body to schema
- err = mapResponseToModel(ctx, instanceResp, &data, resp.Diagnostics)
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "Error reading instance",
- fmt.Sprintf("Processing API payload: %v", err),
- )
- return
- }
-
- // Save identity into Terraform state
- identity := InstanceResourceIdentityModel{
- ProjectID: types.StringValue(projectId),
- Region: types.StringValue(region),
- InstanceID: types.StringValue(instanceId),
- }
- resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- // Save updated data into Terraform state
- resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- tflog.Info(ctx, "sqlserverflexalpha.Instance read")
-}
-
-func (r *instanceResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) {
- var data resourceModel
- updateInstanceError := "Error updating instance"
-
- resp.Diagnostics.Append(req.Plan.Get(ctx, &data)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- ctx = core.InitProviderContext(ctx)
-
- projectId := data.ProjectId.ValueString()
- region := data.Region.ValueString()
- ctx = tflog.SetField(ctx, "project_id", projectId)
- ctx = tflog.SetField(ctx, "region", region)
-
- instanceId := data.InstanceId.ValueString()
- ctx = tflog.SetField(ctx, "instance_id", instanceId)
-
- // Generate API request body from model
- payload, err := toUpdatePayload(ctx, &data, resp)
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- updateInstanceError,
- fmt.Sprintf("Creating API payload: %v", err),
- )
- return
- }
- // Update existing instance
- err = r.client.UpdateInstanceRequest(
- ctx,
- projectId,
- region,
- instanceId,
- ).UpdateInstanceRequestPayload(*payload).Execute()
- if err != nil {
- core.LogAndAddError(ctx, &resp.Diagnostics, updateInstanceError, err.Error())
- return
- }
-
- ctx = core.LogResponse(ctx)
-
- waitResp, err := wait.
- UpdateInstanceWaitHandler(ctx, r.client, projectId, instanceId, region).
- SetSleepBeforeWait(15 * time.Second).
- SetTimeout(45 * time.Minute).
- WaitWithContext(ctx)
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- updateInstanceError,
- fmt.Sprintf("Instance update waiting: %v", err),
- )
- return
- }
-
- // Map response body to schema
- err = mapResponseToModel(ctx, waitResp, &data, resp.Diagnostics)
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- updateInstanceError,
- fmt.Sprintf("Processing API payload: %v", err),
- )
- return
- }
-
- identity := InstanceResourceIdentityModel{
- ProjectID: types.StringValue(projectId),
- Region: types.StringValue(region),
- InstanceID: types.StringValue(instanceId),
- }
- resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- // Save updated data into Terraform state
- resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- tflog.Info(ctx, "sqlserverflexalpha.Instance updated")
-}
-
-func (r *instanceResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) {
- var data resourceModel
-
- // Read Terraform prior state data into the model
- resp.Diagnostics.Append(req.State.Get(ctx, &data)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- // Read identity data
- var identityData InstanceResourceIdentityModel
- resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- ctx = core.InitProviderContext(ctx)
-
- projectId := identityData.ProjectID.ValueString()
- region := identityData.Region.ValueString()
- ctx = tflog.SetField(ctx, "project_id", projectId)
- ctx = tflog.SetField(ctx, "region", region)
-
- instanceId := identityData.InstanceID.ValueString()
- ctx = tflog.SetField(ctx, "instance_id", instanceId)
-
- // Delete existing instance
- err := r.client.DeleteInstanceRequest(ctx, projectId, region, instanceId).Execute()
- if err != nil {
- core.LogAndAddError(ctx, &resp.Diagnostics, "Error deleting instance", fmt.Sprintf("Calling API: %v", err))
- return
- }
-
- ctx = core.LogResponse(ctx)
-
- delResp, err := wait.DeleteInstanceWaitHandler(ctx, r.client, projectId, instanceId, region).WaitWithContext(ctx)
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "Error deleting instance",
- fmt.Sprintf("Instance deletion waiting: %v", err),
- )
- return
- }
-
- if delResp != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "Error deleting instance",
- "wait handler returned non nil result",
- )
- return
- }
-
- resp.State.RemoveResource(ctx)
-
- tflog.Info(ctx, "sqlserverflexalpha.Instance deleted")
-}
-
-// ImportState imports a resource into the Terraform state on success.
-// The expected format of the resource import identifier is: project_id,zone_id,record_set_id
-func (r *instanceResource) ImportState(
- ctx context.Context,
- req resource.ImportStateRequest,
- resp *resource.ImportStateResponse,
-) {
- ctx = core.InitProviderContext(ctx)
-
- if req.ID != "" {
- idParts := strings.Split(req.ID, core.Separator)
-
- if len(idParts) != 3 || idParts[0] == "" || idParts[1] == "" || idParts[2] == "" {
- core.LogAndAddError(
- ctx, &resp.Diagnostics,
- "Error importing instance",
- fmt.Sprintf(
- "Expected import identifier with format [project_id],[region],[instance_id] Got: %q",
- req.ID,
- ),
- )
- return
- }
-
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), idParts[0])...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), idParts[1])...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), idParts[2])...)
- return
- }
-
- // If no ID is provided, attempt to read identity attributes from the import configuration
- var identityData InstanceResourceIdentityModel
- resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- projectId := identityData.ProjectID.ValueString()
- region := identityData.Region.ValueString()
- instanceId := identityData.InstanceID.ValueString()
-
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), projectId)...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), region)...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), instanceId)...)
-
- tflog.Info(ctx, "sqlserverflexalpha instance state imported")
-}
diff --git a/stackit/internal/services/sqlserverflexalpha/sqlserverflex_acc_test.go b/stackit/internal/services/sqlserverflexalpha/sqlserverflex_acc_test.go
deleted file mode 100644
index 9eebac99..00000000
--- a/stackit/internal/services/sqlserverflexalpha/sqlserverflex_acc_test.go
+++ /dev/null
@@ -1,433 +0,0 @@
-package sqlserverflexalpha_test
-
-import (
- "context"
- _ "embed"
- "fmt"
- "log"
- "os"
- "strconv"
- "strings"
- "testing"
-
- "github.com/hashicorp/terraform-plugin-testing/helper/acctest"
- "github.com/hashicorp/terraform-plugin-testing/helper/resource"
- "github.com/stackitcloud/stackit-sdk-go/core/config"
-
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/internal/testutils"
- sqlserverflexalphaPkgGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha"
- sqlserverflexalpha "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/instance"
-
- // The fwresource import alias is so there is no collision
- // with the more typical acceptance testing import:
- // "github.com/hashicorp/terraform-plugin-testing/helper/resource"
- fwresource "github.com/hashicorp/terraform-plugin-framework/resource"
-)
-
-const providerPrefix = "stackitprivatepreview_sqlserverflexalpha"
-
-var testInstances []string
-
-func init() {
- sweeperName := fmt.Sprintf("%s_%s", providerPrefix, "sweeper")
-
- resource.AddTestSweepers(sweeperName, &resource.Sweeper{
- Name: sweeperName,
- F: func(region string) error {
- ctx := context.Background()
- apiClientConfigOptions := []config.ConfigurationOption{}
- apiClient, err := sqlserverflexalphaPkgGen.NewAPIClient(apiClientConfigOptions...)
- if err != nil {
- log.Fatalln(err)
- }
-
- instances, err := apiClient.ListInstancesRequest(ctx, testutils.ProjectId, region).
- Size(100).
- Execute()
- if err != nil {
- log.Fatalln(err)
- }
-
- for _, inst := range instances.GetInstances() {
- if strings.HasPrefix(inst.GetName(), "tf-acc-") {
- for _, item := range testInstances {
- if inst.GetName() == item {
- delErr := apiClient.DeleteInstanceRequestExecute(ctx, testutils.ProjectId, region, inst.GetId())
- if delErr != nil {
- // TODO: maybe just warn?
- log.Fatalln(delErr)
- }
- }
- }
- }
- }
- return nil
- },
- })
-}
-
-func TestInstanceResourceSchema(t *testing.T) {
- t.Parallel()
-
- ctx := context.Background()
- schemaRequest := fwresource.SchemaRequest{}
- schemaResponse := &fwresource.SchemaResponse{}
-
- // Instantiate the resource.Resource and call its Schema method
- sqlserverflexalpha.NewInstanceResource().Schema(ctx, schemaRequest, schemaResponse)
-
- if schemaResponse.Diagnostics.HasError() {
- t.Fatalf("Schema method diagnostics: %+v", schemaResponse.Diagnostics)
- }
-
- // Validate the schema
- diagnostics := schemaResponse.Schema.ValidateImplementation(ctx)
-
- if diagnostics.HasError() {
- t.Fatalf("Schema validation diagnostics: %+v", diagnostics)
- }
-}
-
-func TestMain(m *testing.M) {
- testutils.Setup()
- code := m.Run()
- // shutdown()
- os.Exit(code)
-}
-
-func testAccPreCheck(t *testing.T) {
- if _, ok := os.LookupEnv("TF_ACC_PROJECT_ID"); !ok {
- t.Fatalf("could not find env var TF_ACC_PROJECT_ID")
- }
-}
-
-type resData struct {
- ServiceAccountFilePath string
- ProjectId string
- Region string
- Name string
- TfName string
- FlavorId string
- BackupSchedule string
- UseEncryption bool
- KekKeyId string
- KekKeyRingId string
- KekKeyVersion uint8
- KekServiceAccount string
- PerformanceClass string
- Size uint32
- AclString string
- AccessScope string
- RetentionDays uint32
- Version string
- Users []User
- Databases []Database
-}
-
-type User struct {
- Name string
- ProjectId string
- Roles []string
-}
-
-type Database struct {
- Name string
- ProjectId string
- Owner string
- Collation string
- Compatibility string
-}
-
-func resName(res, name string) string {
- return fmt.Sprintf("%s_%s.%s", providerPrefix, res, name)
-}
-
-func getExample() resData {
- name := acctest.RandomWithPrefix("tf-acc")
- return resData{
- Region: os.Getenv("TF_ACC_REGION"),
- ServiceAccountFilePath: os.Getenv("TF_ACC_SERVICE_ACCOUNT_FILE"),
- ProjectId: os.Getenv("TF_ACC_PROJECT_ID"),
- Name: name,
- TfName: name,
- FlavorId: "4.16-Single",
- BackupSchedule: "0 0 * * *",
- UseEncryption: false,
- RetentionDays: 33,
- PerformanceClass: "premium-perf2-stackit",
- Size: 10,
- AclString: "0.0.0.0/0",
- AccessScope: "PUBLIC",
- Version: "2022",
- }
-}
-
-func TestAccInstance(t *testing.T) {
- exData := getExample()
-
- updNameData := exData
- updNameData.Name = "name-updated"
-
- updSizeData := exData
- updSizeData.Size = 25
-
- resource.ParallelTest(t, resource.TestCase{
- PreCheck: func() {
- testAccPreCheck(t)
- t.Logf(" ... working on instance %s", exData.TfName)
- testInstances = append(testInstances, exData.TfName)
- },
- ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories,
- Steps: []resource.TestStep{
- // Create and verify
- {
- Config: testutils.StringFromTemplateMust(
- "testdata/instance_template.gompl",
- exData,
- ),
- Check: resource.ComposeAggregateTestCheckFunc(
- resource.TestCheckResourceAttr(resName("instance", exData.TfName), "name", exData.Name),
- resource.TestCheckResourceAttrSet(resName("instance", exData.TfName), "id"),
- // TODO: check all fields
- ),
- },
- // Update name and verify
- {
- Config: testutils.StringFromTemplateMust(
- "testdata/instance_template.gompl",
- updNameData,
- ),
- Check: resource.ComposeTestCheckFunc(
- resource.TestCheckResourceAttr(resName("instance", exData.TfName), "name", updNameData.Name),
- ),
- },
- // Update size and verify
- {
- Config: testutils.StringFromTemplateMust(
- "testdata/instance_template.gompl",
- updSizeData,
- ),
- Check: resource.ComposeTestCheckFunc(
- resource.TestCheckResourceAttr(
- testutils.ResStr(providerPrefix, "instance", exData.TfName),
- "storage.size",
- strconv.Itoa(int(updSizeData.Size)),
- ),
- ),
- },
- {
- RefreshState: true,
- },
- //// Import test
- //{
- // ResourceName: resName("instance", exData.TfName),
- // ImportState: true,
- // ImportStateVerify: true,
- // },
- },
- })
-}
-
-func TestAccInstanceNoEncryption(t *testing.T) {
- data := getExample()
-
- dbName := "testDb"
- userName := "testUser"
- data.Users = []User{
- {
- Name: userName,
- ProjectId: os.Getenv("TF_ACC_PROJECT_ID"),
- Roles: []string{
- "##STACKIT_DatabaseManager##",
- "##STACKIT_LoginManager##",
- "##STACKIT_ProcessManager##",
- "##STACKIT_SQLAgentManager##",
- "##STACKIT_SQLAgentUser##",
- "##STACKIT_ServerManager##",
- },
- },
- }
- data.Databases = []Database{
- {
- Name: dbName,
- ProjectId: os.Getenv("TF_ACC_PROJECT_ID"),
- Owner: userName,
- },
- }
-
- resource.ParallelTest(t, resource.TestCase{
- PreCheck: func() {
- testAccPreCheck(t)
- t.Logf(" ... working on instance %s", data.TfName)
- testInstances = append(testInstances, data.TfName)
- },
- ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories,
- Steps: []resource.TestStep{
- // Create and verify
- {
- Config: testutils.StringFromTemplateMust(
- "testdata/instance_template.gompl",
- data,
- ),
- Check: resource.ComposeAggregateTestCheckFunc(
- // check instance values are set
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "id"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "backup_schedule"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "edition"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "flavor_id"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "instance_id"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "is_deletable"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "name"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "replicas"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "retention_days"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "status"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "version"),
-
- resource.TestCheckNoResourceAttr(resName("instance", data.TfName), "encryption"),
-
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption"),
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_id"),
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_version"),
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_ring_id"),
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.service_account"),
-
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.access_scope"),
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.acl"),
-
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.instance_address"),
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.router_address"),
-
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "storage.class"),
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "storage.size"),
-
- // check instance values are correct
- resource.TestCheckResourceAttr(resName("instance", data.TfName), "name", data.Name),
-
- // check user values are set
- resource.TestCheckResourceAttrSet(resName("user", userName), "id"),
- resource.TestCheckResourceAttrSet(resName("user", userName), "username"),
- // resource.TestCheckResourceAttrSet(resName("user", userName), "roles"),
-
- // func(s *terraform.State) error {
- // return nil
- // },
-
- // check user values are correct
- resource.TestCheckResourceAttr(resName("user", userName), "username", userName),
- resource.TestCheckResourceAttr(resName("user", userName), "roles.#", strconv.Itoa(len(data.Users[0].Roles))),
-
- // check database values are set
- resource.TestCheckResourceAttrSet(resName("database", dbName), "id"),
- resource.TestCheckResourceAttrSet(resName("database", dbName), "name"),
- resource.TestCheckResourceAttrSet(resName("database", dbName), "owner"),
- resource.TestCheckResourceAttrSet(resName("database", dbName), "compatibility"),
- resource.TestCheckResourceAttrSet(resName("database", dbName), "collation"),
-
- // check database values are correct
- resource.TestCheckResourceAttr(resName("database", dbName), "name", dbName),
- resource.TestCheckResourceAttr(resName("database", dbName), "owner", userName),
- ),
- },
- },
- })
-}
-
-func TestAccInstanceEncryption(t *testing.T) {
- data := getExample()
-
- dbName := "testDb"
- userName := "testUser"
- data.Users = []User{
- {
- Name: userName,
- ProjectId: os.Getenv("TF_ACC_PROJECT_ID"),
- Roles: []string{"##STACKIT_DatabaseManager##", "##STACKIT_LoginManager##"},
- },
- }
- data.Databases = []Database{
- {
- Name: dbName,
- ProjectId: os.Getenv("TF_ACC_PROJECT_ID"),
- Owner: userName,
- },
- }
-
- data.UseEncryption = true
- data.KekKeyId = "fe039bcf-8d7b-431a-801d-9e81371a6b7b"
- data.KekKeyRingId = "6a2d95ab-3c4c-4963-a2bb-08d17a320e27"
- data.KekKeyVersion = 1
- data.KekServiceAccount = "henselinm-u2v3ex1@sa.stackit.cloud"
-
- resource.ParallelTest(t, resource.TestCase{
- PreCheck: func() {
- testAccPreCheck(t)
- t.Logf(" ... working on instance %s", data.TfName)
- testInstances = append(testInstances, data.TfName)
- },
- ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories,
- Steps: []resource.TestStep{
- // Create and verify
- {
- Config: testutils.StringFromTemplateMust(
- "testdata/instance_template.gompl",
- data,
- ),
- Check: resource.ComposeAggregateTestCheckFunc(
- // check instance values are set
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "id"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "backup_schedule"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "edition"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "flavor_id"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "instance_id"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "is_deletable"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "name"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "replicas"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "retention_days"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "status"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "version"),
-
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption"),
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_id"),
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_version"),
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_ring_id"),
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.service_account"),
-
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.access_scope"),
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.acl"),
-
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.instance_address"),
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.router_address"),
-
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "storage.class"),
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "storage.size"),
-
- // check instance values are correct
- resource.TestCheckResourceAttr(resName("instance", data.TfName), "name", data.Name),
-
- // check user values are set
- resource.TestCheckResourceAttrSet(resName("user", userName), "id"),
- resource.TestCheckResourceAttrSet(resName("user", userName), "username"),
-
- // func(s *terraform.State) error {
- // return nil
- // },
-
- // check user values are correct
- resource.TestCheckResourceAttr(resName("user", userName), "username", userName),
- resource.TestCheckResourceAttr(resName("user", userName), "roles.#", "2"),
-
- // check database values are set
- resource.TestCheckResourceAttrSet(resName("database", dbName), "id"),
- resource.TestCheckResourceAttrSet(resName("database", dbName), "name"),
- resource.TestCheckResourceAttrSet(resName("database", dbName), "owner"),
- resource.TestCheckResourceAttrSet(resName("database", dbName), "compatibility"),
- resource.TestCheckResourceAttrSet(resName("database", dbName), "collation"),
-
- // check database values are correct
- resource.TestCheckResourceAttr(resName("database", dbName), "name", dbName),
- resource.TestCheckResourceAttr(resName("database", dbName), "owner", userName),
- ),
- },
- },
- })
-}
diff --git a/stackit/internal/services/sqlserverflexalpha/testdata/instance_template.gompl b/stackit/internal/services/sqlserverflexalpha/testdata/instance_template.gompl
deleted file mode 100644
index 0bf11c9c..00000000
--- a/stackit/internal/services/sqlserverflexalpha/testdata/instance_template.gompl
+++ /dev/null
@@ -1,60 +0,0 @@
-provider "stackitprivatepreview" {
- default_region = "{{ .Region }}"
- service_account_key_path = "{{ .ServiceAccountFilePath }}"
-}
-
-resource "stackitprivatepreview_sqlserverflexalpha_instance" "{{ .TfName }}" {
- project_id = "{{ .ProjectId }}"
- name = "{{ .Name }}"
- backup_schedule = "{{ .BackupSchedule }}"
- retention_days = {{ .RetentionDays }}
- flavor_id = "{{ .FlavorId }}"
- storage = {
- class = "{{ .PerformanceClass }}"
- size = {{ .Size }}
- }
-{{ if .UseEncryption }}
- encryption = {
- kek_key_id = "{{ .KekKeyId }}"
- kek_key_ring_id = "{{ .KekKeyRingId }}"
- kek_key_version = {{ .KekKeyVersion }}
- service_account = "{{ .KekServiceAccount }}"
- }
-{{ end }}
- network = {
- acl = ["{{ .AclString }}"]
- access_scope = "{{ .AccessScope }}"
- }
- version = "{{ .Version }}"
-}
-
-{{ if .Users }}
-{{ $tfName := .TfName }}
-{{ range $user := .Users }}
-resource "stackitprivatepreview_sqlserverflexalpha_user" "{{ $user.Name }}" {
- project_id = "{{ $user.ProjectId }}"
- instance_id = stackitprivatepreview_sqlserverflexalpha_instance.{{ $tfName }}.instance_id
- username = "{{ $user.Name }}"
- roles = [{{ range $i, $v := $user.Roles }}{{if $i}},{{end}}"{{$v}}"{{end}}]
-}
-{{ end }}
-{{ end }}
-
-{{ if .Databases }}
-{{ $tfName := .TfName }}
-{{ range $db := .Databases }}
-resource "stackitprivatepreview_sqlserverflexalpha_database" "{{ $db.Name }}" {
- depends_on = [stackitprivatepreview_sqlserverflexalpha_user.{{ $db.Owner }}]
- project_id = "{{ $db.ProjectId }}"
- instance_id = stackitprivatepreview_sqlserverflexalpha_instance.{{ $tfName }}.instance_id
- name = "{{ $db.Name }}"
- owner = "{{ $db.Owner }}"
-{{ if $db.Collation }}
- collation = "{{ $db.Collation }}"
-{{ end }}
-{{ if $db.Compatibility }}
- compatibility = "{{ $db.Compatibility }}"
-{{ end }}
-}
-{{ end }}
-{{ end }}
diff --git a/stackit/internal/services/sqlserverflexalpha/user/datasource.go b/stackit/internal/services/sqlserverflexalpha/user/datasource.go
deleted file mode 100644
index e191e5a7..00000000
--- a/stackit/internal/services/sqlserverflexalpha/user/datasource.go
+++ /dev/null
@@ -1,139 +0,0 @@
-package sqlserverflexalpha
-
-import (
- "context"
- "fmt"
- "net/http"
-
- "github.com/hashicorp/terraform-plugin-framework/datasource"
- "github.com/hashicorp/terraform-plugin-framework/types"
- "github.com/hashicorp/terraform-plugin-log/tflog"
-
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
- sqlserverflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/utils"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
-
- sqlserverflexalphaPkg "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha"
- sqlserverflexalphaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/user/datasources_gen"
-)
-
-var _ datasource.DataSource = (*userDataSource)(nil)
-
-func NewUserDataSource() datasource.DataSource {
- return &userDataSource{}
-}
-
-type dataSourceModel struct {
- DefaultDatabase types.String `tfsdk:"default_database"`
- Host types.String `tfsdk:"host"`
- Id types.String `tfsdk:"id"`
- InstanceId types.String `tfsdk:"instance_id"`
- Port types.Int64 `tfsdk:"port"`
- ProjectId types.String `tfsdk:"project_id"`
- Region types.String `tfsdk:"region"`
- Roles types.List `tfsdk:"roles"`
- Status types.String `tfsdk:"status"`
- UserId types.Int64 `tfsdk:"user_id"`
- Username types.String `tfsdk:"username"`
-}
-
-type userDataSource struct {
- client *sqlserverflexalphaPkg.APIClient
- providerData core.ProviderData
-}
-
-func (d *userDataSource) Metadata(
- _ context.Context,
- req datasource.MetadataRequest,
- resp *datasource.MetadataResponse,
-) {
- resp.TypeName = req.ProviderTypeName + "_sqlserverflexalpha_user"
-}
-
-func (d *userDataSource) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) {
- resp.Schema = sqlserverflexalphaGen.UserDataSourceSchema(ctx)
-}
-
-// Configure adds the provider configured client to the data source.
-func (d *userDataSource) Configure(
- ctx context.Context,
- req datasource.ConfigureRequest,
- resp *datasource.ConfigureResponse,
-) {
- var ok bool
- d.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics)
- if !ok {
- return
- }
-
- apiClient := sqlserverflexUtils.ConfigureClient(ctx, &d.providerData, &resp.Diagnostics)
- if resp.Diagnostics.HasError() {
- return
- }
- d.client = apiClient
- tflog.Info(ctx, "SQL SERVER Flex alpha database client configured")
-}
-
-func (d *userDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
- var model dataSourceModel
- diags := req.Config.Get(ctx, &model)
- resp.Diagnostics.Append(diags...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- ctx = core.InitProviderContext(ctx)
-
- projectId := model.ProjectId.ValueString()
- instanceId := model.InstanceId.ValueString()
- userId := model.UserId.ValueInt64()
- region := d.providerData.GetRegionWithOverride(model.Region)
- ctx = tflog.SetField(ctx, "project_id", projectId)
- ctx = tflog.SetField(ctx, "instance_id", instanceId)
- ctx = tflog.SetField(ctx, "user_id", userId)
- ctx = tflog.SetField(ctx, "region", region)
-
- recordSetResp, err := d.client.GetUserRequest(ctx, projectId, region, instanceId, userId).Execute()
- if err != nil {
- utils.LogError(
- ctx,
- &resp.Diagnostics,
- err,
- "Reading user",
- fmt.Sprintf(
- "User with ID %q or instance with ID %q does not exist in project %q.",
- userId,
- instanceId,
- projectId,
- ),
- map[int]string{
- http.StatusForbidden: fmt.Sprintf("Project with ID %q not found or forbidden access", projectId),
- },
- )
- resp.State.RemoveResource(ctx)
- return
- }
-
- ctx = core.LogResponse(ctx)
-
- // Map response body to schema and populate Computed attribute values
- err = mapDataSourceFields(recordSetResp, &model, region)
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "Error reading user",
- fmt.Sprintf("Processing API payload: %v", err),
- )
- return
- }
-
- // Set refreshed state
- diags = resp.State.Set(ctx, model)
- resp.Diagnostics.Append(diags...)
- if resp.Diagnostics.HasError() {
- return
- }
- tflog.Info(ctx, "SQLServer Flex beta instance read")
-}
diff --git a/stackit/internal/services/sqlserverflexalpha/user/mapper.go b/stackit/internal/services/sqlserverflexalpha/user/mapper.go
deleted file mode 100644
index 8e522d59..00000000
--- a/stackit/internal/services/sqlserverflexalpha/user/mapper.go
+++ /dev/null
@@ -1,193 +0,0 @@
-package sqlserverflexalpha
-
-import (
- "fmt"
- "slices"
- "strconv"
-
- "github.com/hashicorp/terraform-plugin-framework/attr"
- "github.com/hashicorp/terraform-plugin-framework/types"
-
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
-)
-
-// mapDataSourceFields maps the API response to a dataSourceModel.
-func mapDataSourceFields(userResp *sqlserverflexalpha.GetUserResponse, model *dataSourceModel, region string) error {
- if userResp == nil {
- return fmt.Errorf("response is nil")
- }
- if model == nil {
- return fmt.Errorf("model input is nil")
- }
- user := userResp
-
- // Handle user ID
- var userId int64
- if model.UserId.ValueInt64() != 0 {
- userId = model.UserId.ValueInt64()
- } else if user.Id != nil {
- userId = *user.Id
- } else {
- return fmt.Errorf("user id not present")
- }
-
- // Set main attributes
- model.Id = utils.BuildInternalTerraformId(
- model.ProjectId.ValueString(), region, model.InstanceId.ValueString(), strconv.FormatInt(userId, 10),
- )
- model.UserId = types.Int64Value(userId)
- model.Username = types.StringPointerValue(user.Username)
-
- // Map roles
- if user.Roles == nil {
- model.Roles = types.List(types.SetNull(types.StringType))
- } else {
- resRoles := *user.Roles
- slices.Sort(resRoles)
-
- var roles []attr.Value
- for _, role := range resRoles {
- roles = append(roles, types.StringValue(string(role)))
- }
- rolesSet, diags := types.SetValue(types.StringType, roles)
- if diags.HasError() {
- return fmt.Errorf("failed to map roles: %w", core.DiagsToError(diags))
- }
- model.Roles = types.List(rolesSet)
- }
-
- // Set remaining attributes
- model.Host = types.StringPointerValue(user.Host)
- model.Port = types.Int64PointerValue(user.Port)
- model.Region = types.StringValue(region)
- model.Status = types.StringPointerValue(user.Status)
- model.DefaultDatabase = types.StringPointerValue(user.DefaultDatabase)
-
- return nil
-}
-
-// mapFields maps the API response to a resourceModel.
-func mapFields(userResp *sqlserverflexalpha.GetUserResponse, model *resourceModel, region string) error {
- if userResp == nil {
- return fmt.Errorf("response is nil")
- }
- if model == nil {
- return fmt.Errorf("model input is nil")
- }
- user := userResp
-
- // Handle user ID
- var userId int64
- if model.UserId.ValueInt64() != 0 {
- userId = model.UserId.ValueInt64()
- } else if user.Id != nil {
- userId = *user.Id
- } else {
- return fmt.Errorf("user id not present")
- }
-
- // Set main attributes
- model.Id = types.Int64Value(userId)
- model.UserId = types.Int64Value(userId)
- model.Username = types.StringPointerValue(user.Username)
-
- // Map roles
- if user.Roles != nil {
- resRoles := *user.Roles
- slices.Sort(resRoles)
-
- var roles []attr.Value
- for _, role := range resRoles {
- roles = append(roles, types.StringValue(string(role)))
- }
- rolesSet, diags := types.SetValue(types.StringType, roles)
- if diags.HasError() {
- return fmt.Errorf("failed to map roles: %w", core.DiagsToError(diags))
- }
- model.Roles = types.List(rolesSet)
- }
-
- // Ensure roles is not null
- if model.Roles.IsNull() || model.Roles.IsUnknown() {
- model.Roles = types.List(types.SetNull(types.StringType))
- }
-
- // Set connection details
- model.Host = types.StringPointerValue(user.Host)
- model.Port = types.Int64PointerValue(user.Port)
- model.Region = types.StringValue(region)
- return nil
-}
-
-// mapFieldsCreate maps the API response from creating a user to a resourceModel.
-func mapFieldsCreate(userResp *sqlserverflexalpha.CreateUserResponse, model *resourceModel, region string) error {
- if userResp == nil {
- return fmt.Errorf("response is nil")
- }
- if model == nil {
- return fmt.Errorf("model input is nil")
- }
- user := userResp
-
- if user.Id == nil {
- return fmt.Errorf("user id not present")
- }
- userId := *user.Id
- model.Id = types.Int64Value(userId)
- model.UserId = types.Int64Value(userId)
- model.Username = types.StringPointerValue(user.Username)
-
- if user.Password == nil {
- return fmt.Errorf("user password not present")
- }
- model.Password = types.StringValue(*user.Password)
-
- if user.Roles != nil {
- resRoles := *user.Roles
- slices.Sort(resRoles)
-
- var roles []attr.Value
- for _, role := range resRoles {
- roles = append(roles, types.StringValue(string(role)))
- }
- rolesSet, diags := types.SetValue(types.StringType, roles)
- if diags.HasError() {
- return fmt.Errorf("failed to map roles: %w", core.DiagsToError(diags))
- }
- model.Roles = types.List(rolesSet)
- }
-
- if model.Roles.IsNull() || model.Roles.IsUnknown() {
- model.Roles = types.List(types.SetNull(types.StringType))
- }
-
- model.Password = types.StringPointerValue(user.Password)
- model.Uri = types.StringPointerValue(user.Uri)
-
- model.Host = types.StringPointerValue(user.Host)
- model.Port = types.Int64PointerValue(user.Port)
- model.Region = types.StringValue(region)
- model.Status = types.StringPointerValue(user.Status)
- model.DefaultDatabase = types.StringPointerValue(user.DefaultDatabase)
-
- return nil
-}
-
-// toCreatePayload converts a resourceModel to an API CreateUserRequestPayload.
-func toCreatePayload(
- model *resourceModel,
- roles []string,
-) (*sqlserverflexalpha.CreateUserRequestPayload, error) {
- if model == nil {
- return nil, fmt.Errorf("nil model")
- }
-
- return &sqlserverflexalpha.CreateUserRequestPayload{
- Username: conversion.StringValueToPointer(model.Username),
- DefaultDatabase: conversion.StringValueToPointer(model.DefaultDatabase),
- Roles: &roles,
- }, nil
-}
diff --git a/stackit/internal/services/sqlserverflexalpha/user/mapper_test.go b/stackit/internal/services/sqlserverflexalpha/user/mapper_test.go
deleted file mode 100644
index 4dbe7d03..00000000
--- a/stackit/internal/services/sqlserverflexalpha/user/mapper_test.go
+++ /dev/null
@@ -1,533 +0,0 @@
-package sqlserverflexalpha
-
-import (
- "testing"
-
- "github.com/google/go-cmp/cmp"
- "github.com/hashicorp/terraform-plugin-framework/attr"
- "github.com/hashicorp/terraform-plugin-framework/types"
- "github.com/stackitcloud/stackit-sdk-go/core/utils"
-
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha"
-)
-
-func TestMapDataSourceFields(t *testing.T) {
- const testRegion = "region"
- tests := []struct {
- description string
- input *sqlserverflexalpha.GetUserResponse
- region string
- expected dataSourceModel
- isValid bool
- }{
- {
- "default_values",
- &sqlserverflexalpha.GetUserResponse{},
- testRegion,
- dataSourceModel{
- Id: types.StringValue("pid,region,iid,1"),
- UserId: types.Int64Value(1),
- InstanceId: types.StringValue("iid"),
- ProjectId: types.StringValue("pid"),
- Username: types.StringNull(),
- Roles: types.List(types.SetNull(types.StringType)),
- Host: types.StringNull(),
- Port: types.Int64Null(),
- Region: types.StringValue(testRegion),
- Status: types.StringNull(),
- DefaultDatabase: types.StringNull(),
- },
- true,
- },
- {
- "simple_values",
- &sqlserverflexalpha.GetUserResponse{
- Roles: &[]string{
- "##STACKIT_SQLAgentUser##",
- "##STACKIT_DatabaseManager##",
- "##STACKIT_LoginManager##",
- "##STACKIT_SQLAgentManager##",
- "##STACKIT_ProcessManager##",
- "##STACKIT_ServerManager##",
- },
- Username: utils.Ptr("username"),
- Host: utils.Ptr("host"),
- Port: utils.Ptr(int64(1234)),
- Status: utils.Ptr("active"),
- DefaultDatabase: utils.Ptr("default_db"),
- },
- testRegion,
- dataSourceModel{
- Id: types.StringValue("pid,region,iid,1"),
- UserId: types.Int64Value(1),
- InstanceId: types.StringValue("iid"),
- ProjectId: types.StringValue("pid"),
- Username: types.StringValue("username"),
- Roles: types.List(
- types.SetValueMust(
- types.StringType, []attr.Value{
- types.StringValue("##STACKIT_DatabaseManager##"),
- types.StringValue("##STACKIT_LoginManager##"),
- types.StringValue("##STACKIT_ProcessManager##"),
- types.StringValue("##STACKIT_SQLAgentManager##"),
- types.StringValue("##STACKIT_SQLAgentUser##"),
- types.StringValue("##STACKIT_ServerManager##"),
- },
- ),
- ),
- Host: types.StringValue("host"),
- Port: types.Int64Value(1234),
- Region: types.StringValue(testRegion),
- Status: types.StringValue("active"),
- DefaultDatabase: types.StringValue("default_db"),
- },
- true,
- },
- {
- "null_fields_and_int_conversions",
- &sqlserverflexalpha.GetUserResponse{
- Id: utils.Ptr(int64(1)),
- Roles: &[]string{},
- Username: nil,
- Host: nil,
- Port: utils.Ptr(int64(2123456789)),
- },
- testRegion,
- dataSourceModel{
- Id: types.StringValue("pid,region,iid,1"),
- UserId: types.Int64Value(1),
- InstanceId: types.StringValue("iid"),
- ProjectId: types.StringValue("pid"),
- Username: types.StringNull(),
- Roles: types.List(types.SetValueMust(types.StringType, []attr.Value{})),
- Host: types.StringNull(),
- Port: types.Int64Value(2123456789),
- Region: types.StringValue(testRegion),
- },
- true,
- },
- {
- "nil_response",
- nil,
- testRegion,
- dataSourceModel{},
- false,
- },
- {
- "nil_response_2",
- &sqlserverflexalpha.GetUserResponse{},
- testRegion,
- dataSourceModel{},
- false,
- },
- {
- "no_resource_id",
- &sqlserverflexalpha.GetUserResponse{},
- testRegion,
- dataSourceModel{},
- false,
- },
- }
- for _, tt := range tests {
- t.Run(
- tt.description, func(t *testing.T) {
- state := &dataSourceModel{
- ProjectId: tt.expected.ProjectId,
- InstanceId: tt.expected.InstanceId,
- UserId: tt.expected.UserId,
- }
- err := mapDataSourceFields(tt.input, state, tt.region)
- if !tt.isValid && err == nil {
- t.Fatalf("Should have failed")
- }
- if tt.isValid && err != nil {
- t.Fatalf("Should not have failed: %v", err)
- }
- if tt.isValid {
- diff := cmp.Diff(&tt.expected, state)
- if diff != "" {
- t.Fatalf("Data does not match: %s", diff)
- }
- }
- },
- )
- }
-}
-
-func TestMapFieldsCreate(t *testing.T) {
- const testRegion = "region"
- tests := []struct {
- description string
- input *sqlserverflexalpha.CreateUserResponse
- region string
- expected resourceModel
- isValid bool
- }{
- {
- "default_values",
- &sqlserverflexalpha.CreateUserResponse{
- Id: utils.Ptr(int64(1)),
- Password: utils.Ptr(""),
- },
- testRegion,
- resourceModel{
- Id: types.Int64Value(1),
- UserId: types.Int64Value(1),
- InstanceId: types.StringValue("iid"),
- ProjectId: types.StringValue("pid"),
- Username: types.StringNull(),
- Roles: types.List(types.SetNull(types.StringType)),
- Password: types.StringValue(""),
- Host: types.StringNull(),
- Port: types.Int64Null(),
- Region: types.StringValue(testRegion),
- },
- true,
- },
- {
- "simple_values",
- &sqlserverflexalpha.CreateUserResponse{
- Id: utils.Ptr(int64(2)),
- Roles: &[]string{
- "role_2",
- "role_1",
- "",
- },
- Username: utils.Ptr("username"),
- Password: utils.Ptr("password"),
- Host: utils.Ptr("host"),
- Port: utils.Ptr(int64(1234)),
- Status: utils.Ptr("status"),
- DefaultDatabase: utils.Ptr("default_db"),
- },
- testRegion,
- resourceModel{
- Id: types.Int64Value(2),
- UserId: types.Int64Value(2),
- InstanceId: types.StringValue("iid"),
- ProjectId: types.StringValue("pid"),
- Username: types.StringValue("username"),
- Roles: types.List(
- types.SetValueMust(
- types.StringType, []attr.Value{
- types.StringValue(""),
- types.StringValue("role_1"),
- types.StringValue("role_2"),
- },
- ),
- ),
- Password: types.StringValue("password"),
- Host: types.StringValue("host"),
- Port: types.Int64Value(1234),
- Region: types.StringValue(testRegion),
- Status: types.StringValue("status"),
- DefaultDatabase: types.StringValue("default_db"),
- },
- true,
- },
- {
- "null_fields_and_int_conversions",
- &sqlserverflexalpha.CreateUserResponse{
- Id: utils.Ptr(int64(3)),
- Roles: &[]string{},
- Username: nil,
- Password: utils.Ptr(""),
- Host: nil,
- Port: utils.Ptr(int64(2123456789)),
- },
- testRegion,
- resourceModel{
- Id: types.Int64Value(3),
- UserId: types.Int64Value(3),
- InstanceId: types.StringValue("iid"),
- ProjectId: types.StringValue("pid"),
- Username: types.StringNull(),
- Roles: types.List(types.SetValueMust(types.StringType, []attr.Value{})),
- Password: types.StringValue(""),
- Host: types.StringNull(),
- Port: types.Int64Value(2123456789),
- Region: types.StringValue(testRegion),
- DefaultDatabase: types.StringNull(),
- Status: types.StringNull(),
- },
- true,
- },
- {
- "nil_response",
- nil,
- testRegion,
- resourceModel{},
- false,
- },
- {
- "nil_response_2",
- &sqlserverflexalpha.CreateUserResponse{},
- testRegion,
- resourceModel{},
- false,
- },
- {
- "no_resource_id",
- &sqlserverflexalpha.CreateUserResponse{},
- testRegion,
- resourceModel{},
- false,
- },
- {
- "no_password",
- &sqlserverflexalpha.CreateUserResponse{
- Id: utils.Ptr(int64(1)),
- },
- testRegion,
- resourceModel{},
- false,
- },
- }
- for _, tt := range tests {
- t.Run(
- tt.description, func(t *testing.T) {
- state := &resourceModel{
- ProjectId: tt.expected.ProjectId,
- InstanceId: tt.expected.InstanceId,
- }
- err := mapFieldsCreate(tt.input, state, tt.region)
- if !tt.isValid && err == nil {
- t.Fatalf("Should have failed")
- }
- if tt.isValid && err != nil {
- t.Fatalf("Should not have failed: %v", err)
- }
- if tt.isValid {
- diff := cmp.Diff(&tt.expected, state)
- if diff != "" {
- t.Fatalf("Data does not match: %s", diff)
- }
- }
- },
- )
- }
-}
-
-func TestMapFields(t *testing.T) {
- const testRegion = "region"
- tests := []struct {
- description string
- input *sqlserverflexalpha.GetUserResponse
- region string
- expected resourceModel
- isValid bool
- }{
- {
- "default_values",
- &sqlserverflexalpha.GetUserResponse{},
- testRegion,
- resourceModel{
- Id: types.Int64Value(1),
- UserId: types.Int64Value(1),
- InstanceId: types.StringValue("iid"),
- ProjectId: types.StringValue("pid"),
- Username: types.StringNull(),
- Roles: types.List(types.SetNull(types.StringType)),
- Host: types.StringNull(),
- Port: types.Int64Null(),
- Region: types.StringValue(testRegion),
- },
- true,
- },
- {
- "simple_values",
- &sqlserverflexalpha.GetUserResponse{
- Roles: &[]string{
- "role_2",
- "role_1",
- "",
- },
- Username: utils.Ptr("username"),
- Host: utils.Ptr("host"),
- Port: utils.Ptr(int64(1234)),
- },
- testRegion,
- resourceModel{
- Id: types.Int64Value(2),
- UserId: types.Int64Value(2),
- InstanceId: types.StringValue("iid"),
- ProjectId: types.StringValue("pid"),
- Username: types.StringValue("username"),
- Roles: types.List(
- types.SetValueMust(
- types.StringType, []attr.Value{
- types.StringValue(""),
- types.StringValue("role_1"),
- types.StringValue("role_2"),
- },
- ),
- ),
- Host: types.StringValue("host"),
- Port: types.Int64Value(1234),
- Region: types.StringValue(testRegion),
- },
- true,
- },
- {
- "null_fields_and_int_conversions",
- &sqlserverflexalpha.GetUserResponse{
- Id: utils.Ptr(int64(1)),
- Roles: &[]string{},
- Username: nil,
- Host: nil,
- Port: utils.Ptr(int64(2123456789)),
- },
- testRegion,
- resourceModel{
- Id: types.Int64Value(1),
- UserId: types.Int64Value(1),
- InstanceId: types.StringValue("iid"),
- ProjectId: types.StringValue("pid"),
- Username: types.StringNull(),
- Roles: types.List(types.SetValueMust(types.StringType, []attr.Value{})),
- Host: types.StringNull(),
- Port: types.Int64Value(2123456789),
- Region: types.StringValue(testRegion),
- },
- true,
- },
- {
- "nil_response",
- nil,
- testRegion,
- resourceModel{},
- false,
- },
- {
- "nil_response_2",
- &sqlserverflexalpha.GetUserResponse{},
- testRegion,
- resourceModel{},
- false,
- },
- {
- "no_resource_id",
- &sqlserverflexalpha.GetUserResponse{},
- testRegion,
- resourceModel{},
- false,
- },
- }
- for _, tt := range tests {
- t.Run(
- tt.description, func(t *testing.T) {
- state := &resourceModel{
- ProjectId: tt.expected.ProjectId,
- InstanceId: tt.expected.InstanceId,
- UserId: tt.expected.UserId,
- }
- err := mapFields(tt.input, state, tt.region)
- if !tt.isValid && err == nil {
- t.Fatalf("Should have failed")
- }
- if tt.isValid && err != nil {
- t.Fatalf("Should not have failed: %v", err)
- }
- if tt.isValid {
- diff := cmp.Diff(&tt.expected, state)
- if diff != "" {
- t.Fatalf("Data does not match: %s", diff)
- }
- }
- },
- )
- }
-}
-
-func TestToCreatePayload(t *testing.T) {
- tests := []struct {
- description string
- input *resourceModel
- inputRoles []string
- expected *sqlserverflexalpha.CreateUserRequestPayload
- isValid bool
- }{
- {
- "default_values",
- &resourceModel{},
- []string{},
- &sqlserverflexalpha.CreateUserRequestPayload{
- Roles: &[]string{},
- Username: nil,
- },
- true,
- },
- {
- "default_values",
- &resourceModel{
- Username: types.StringValue("username"),
- },
- []string{
- "role_1",
- "role_2",
- },
- &sqlserverflexalpha.CreateUserRequestPayload{
- Roles: &[]string{
- "role_1",
- "role_2",
- },
- Username: utils.Ptr("username"),
- },
- true,
- },
- {
- "null_fields_and_int_conversions",
- &resourceModel{
- Username: types.StringNull(),
- },
- []string{
- "",
- },
- &sqlserverflexalpha.CreateUserRequestPayload{
- Roles: &[]string{
- "",
- },
- Username: nil,
- },
- true,
- },
- {
- "nil_model",
- nil,
- []string{},
- nil,
- false,
- },
- {
- "nil_roles",
- &resourceModel{
- Username: types.StringValue("username"),
- },
- []string{},
- &sqlserverflexalpha.CreateUserRequestPayload{
- Roles: &[]string{},
- Username: utils.Ptr("username"),
- },
- true,
- },
- }
- for _, tt := range tests {
- t.Run(
- tt.description, func(t *testing.T) {
- output, err := toCreatePayload(tt.input, tt.inputRoles)
- if !tt.isValid && err == nil {
- t.Fatalf("Should have failed")
- }
- if tt.isValid && err != nil {
- t.Fatalf("Should not have failed: %v", err)
- }
- if tt.isValid {
- diff := cmp.Diff(tt.expected, output)
- if diff != "" {
- t.Fatalf("Data does not match: %s", diff)
- }
- }
- },
- )
- }
-}
diff --git a/stackit/internal/services/sqlserverflexalpha/user/planModifiers.yaml b/stackit/internal/services/sqlserverflexalpha/user/planModifiers.yaml
deleted file mode 100644
index 8ff346ab..00000000
--- a/stackit/internal/services/sqlserverflexalpha/user/planModifiers.yaml
+++ /dev/null
@@ -1,49 +0,0 @@
-fields:
- - name: 'id'
- modifiers:
- - 'UseStateForUnknown'
-
- - name: 'instance_id'
- validators:
- - validate.NoSeparator
- - validate.UUID
- modifiers:
- - 'UseStateForUnknown'
- - 'RequiresReplace'
-
- - name: 'project_id'
- validators:
- - validate.NoSeparator
- - validate.UUID
- modifiers:
- - 'UseStateForUnknown'
- - 'RequiresReplace'
-
- - name: 'region'
- modifiers:
- - 'RequiresReplace'
-
- - name: 'user_id'
- modifiers:
- - 'UseStateForUnknown'
- - 'RequiresReplace'
-
- - name: 'username'
- modifiers:
- - 'UseStateForUnknown'
-
- - name: 'roles'
- modifiers:
- - 'UseStateForUnknown'
-
- - name: 'password'
- modifiers:
- - 'UseStateForUnknown'
-
- - name: 'uri'
- modifiers:
- - 'UseStateForUnknown'
-
- - name: 'status'
- modifiers:
- - 'UseStateForUnknown'
diff --git a/stackit/internal/services/sqlserverflexalpha/user/resource.go b/stackit/internal/services/sqlserverflexalpha/user/resource.go
deleted file mode 100644
index ee322fab..00000000
--- a/stackit/internal/services/sqlserverflexalpha/user/resource.go
+++ /dev/null
@@ -1,553 +0,0 @@
-package sqlserverflexalpha
-
-import (
- "context"
- _ "embed"
- "errors"
- "fmt"
- "net/http"
- "slices"
- "strconv"
- "strings"
- "time"
-
- "github.com/hashicorp/terraform-plugin-framework/path"
- "github.com/hashicorp/terraform-plugin-framework/resource"
- "github.com/hashicorp/terraform-plugin-framework/resource/identityschema"
- "github.com/hashicorp/terraform-plugin-framework/types"
- "github.com/hashicorp/terraform-plugin-log/tflog"
- "github.com/stackitcloud/stackit-sdk-go/core/oapierror"
-
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
- sqlserverflexalphaUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/utils"
- sqlserverflexalphaWait "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/wait/sqlserverflexalpha"
-
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
-
- sqlserverflexalphaResGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/user/resources_gen"
-)
-
-var (
- _ resource.Resource = &userResource{}
- _ resource.ResourceWithConfigure = &userResource{}
- _ resource.ResourceWithImportState = &userResource{}
- _ resource.ResourceWithModifyPlan = &userResource{}
- _ resource.ResourceWithIdentity = &userResource{}
- _ resource.ResourceWithValidateConfig = &userResource{}
-)
-
-func NewUserResource() resource.Resource {
- return &userResource{}
-}
-
-// resourceModel describes the resource data model.
-type resourceModel = sqlserverflexalphaResGen.UserModel
-
-// UserResourceIdentityModel describes the resource's identity attributes.
-type UserResourceIdentityModel struct {
- ProjectID types.String `tfsdk:"project_id"`
- Region types.String `tfsdk:"region"`
- InstanceID types.String `tfsdk:"instance_id"`
- UserID types.Int64 `tfsdk:"user_id"`
-}
-
-type userResource struct {
- client *sqlserverflexalpha.APIClient
- providerData core.ProviderData
-}
-
-func (r *userResource) Metadata(_ context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
- resp.TypeName = req.ProviderTypeName + "_sqlserverflexalpha_user"
-}
-
-// Configure adds the provider configured client to the resource.
-func (r *userResource) Configure(ctx context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) {
- var ok bool
- r.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics)
- if !ok {
- return
- }
-
- apiClient := sqlserverflexalphaUtils.ConfigureClient(ctx, &r.providerData, &resp.Diagnostics)
- if resp.Diagnostics.HasError() {
- return
- }
- r.client = apiClient
- tflog.Info(ctx, "SQLServer Beta Flex user client configured")
-}
-
-// ModifyPlan implements resource.ResourceWithModifyPlan.
-// Use the modifier to set the effective region in the current plan.
-func (r *userResource) ModifyPlan(
- ctx context.Context,
- req resource.ModifyPlanRequest,
- resp *resource.ModifyPlanResponse,
-) { // nolint:gocritic // function signature required by Terraform
- var configModel resourceModel
- // skip initial empty configuration to avoid follow-up errors
- if req.Config.Raw.IsNull() {
- return
- }
- resp.Diagnostics.Append(req.Config.Get(ctx, &configModel)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- var planModel resourceModel
- resp.Diagnostics.Append(req.Plan.Get(ctx, &planModel)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- utils.AdaptRegion(ctx, configModel.Region, &planModel.Region, r.providerData.GetRegion(), resp)
- if resp.Diagnostics.HasError() {
- return
- }
-
- resp.Diagnostics.Append(resp.Plan.Set(ctx, planModel)...)
- if resp.Diagnostics.HasError() {
- return
- }
-}
-
-//go:embed planModifiers.yaml
-var modifiersFileByte []byte
-
-// Schema defines the schema for the resource.
-func (r *userResource) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) {
- s := sqlserverflexalphaResGen.UserResourceSchema(ctx)
-
- fields, err := utils.ReadModifiersConfig(modifiersFileByte)
- if err != nil {
- resp.Diagnostics.AddError("error during read modifiers config file", err.Error())
- return
- }
-
- err = utils.AddPlanModifiersToResourceSchema(fields, &s)
- if err != nil {
- resp.Diagnostics.AddError("error adding plan modifiers", err.Error())
- return
- }
- resp.Schema = s
-}
-
-// IdentitySchema defines the schema for the resource's identity attributes.
-func (r *userResource) IdentitySchema(
- _ context.Context,
- _ resource.IdentitySchemaRequest,
- response *resource.IdentitySchemaResponse,
-) {
- response.IdentitySchema = identityschema.Schema{
- Attributes: map[string]identityschema.Attribute{
- "project_id": identityschema.StringAttribute{
- RequiredForImport: true, // must be set during import by the practitioner
- },
- "region": identityschema.StringAttribute{
- RequiredForImport: true, // can be defaulted by the provider configuration
- },
- "instance_id": identityschema.StringAttribute{
- RequiredForImport: true, // can be defaulted by the provider configuration
- },
- "user_id": identityschema.Int64Attribute{
- RequiredForImport: true, // can be defaulted by the provider configuration
- },
- },
- }
-}
-
-func (r *userResource) ValidateConfig(
- ctx context.Context,
- req resource.ValidateConfigRequest,
- resp *resource.ValidateConfigResponse,
-) {
- var data resourceModel
-
- resp.Diagnostics.Append(req.Config.Get(ctx, &data)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- var roles []string
- diags := data.Roles.ElementsAs(ctx, &roles, false)
- resp.Diagnostics.Append(diags...)
- if diags.HasError() {
- return
- }
-
- var resRoles []string
- for _, role := range roles {
- if slices.Contains(resRoles, role) {
- resp.Diagnostics.AddAttributeError(
- path.Root("roles"),
- "Attribute Configuration Error",
- "defined roles MUST NOT contain duplicates",
- )
- return
- }
- resRoles = append(resRoles, role)
- }
-}
-
-// Create creates the resource and sets the initial Terraform state.
-func (r *userResource) Create(
- ctx context.Context,
- req resource.CreateRequest,
- resp *resource.CreateResponse,
-) { // nolint:gocritic // function signature required by Terraform
- var model resourceModel
- diags := req.Plan.Get(ctx, &model)
- resp.Diagnostics.Append(diags...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- ctx = core.InitProviderContext(ctx)
-
- projectId := model.ProjectId.ValueString()
- instanceId := model.InstanceId.ValueString()
- region := model.Region.ValueString()
-
- ctx = tflog.SetField(ctx, "project_id", projectId)
- ctx = tflog.SetField(ctx, "instance_id", instanceId)
- ctx = tflog.SetField(ctx, "region", region)
-
- var roles []string
- if !model.Roles.IsNull() && !model.Roles.IsUnknown() {
- diags = model.Roles.ElementsAs(ctx, &roles, false)
- resp.Diagnostics.Append(diags...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- slices.Sort(roles)
- }
-
- // Generate API request body from model
- payload, err := toCreatePayload(&model, roles)
- if err != nil {
- core.LogAndAddError(ctx, &resp.Diagnostics, "Error creating user", fmt.Sprintf("Creating API payload: %v", err))
- return
- }
- // Create new user
- userResp, err := r.client.CreateUserRequest(
- ctx,
- projectId,
- region,
- instanceId,
- ).CreateUserRequestPayload(*payload).Execute()
- if err != nil {
- core.LogAndAddError(ctx, &resp.Diagnostics, "Error creating user", fmt.Sprintf("Calling API: %v", err))
- return
- }
-
- ctx = core.LogResponse(ctx)
-
- if userResp == nil || userResp.Id == nil || *userResp.Id == 0 {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "Error creating user",
- "API didn't return user Id. A user might have been created",
- )
- return
- }
-
- userId := *userResp.Id
- ctx = tflog.SetField(ctx, "user_id", userId)
-
- // Set data returned by API in identity
- identity := UserResourceIdentityModel{
- ProjectID: types.StringValue(projectId),
- Region: types.StringValue(region),
- InstanceID: types.StringValue(instanceId),
- UserID: types.Int64Value(userId),
- }
- resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- err = mapFieldsCreate(userResp, &model, region)
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "Error creating user",
- fmt.Sprintf("Processing API payload: %v", err),
- )
- return
- }
-
- waitResp, err := sqlserverflexalphaWait.CreateUserWaitHandler(
- ctx,
- r.client,
- projectId,
- instanceId,
- region,
- userId,
- ).SetSleepBeforeWait(
- 90 * time.Second,
- ).SetTimeout(
- 90 * time.Minute,
- ).WaitWithContext(ctx)
-
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "create user",
- fmt.Sprintf("Instance creation waiting: %v", err),
- )
- return
- }
-
- if waitResp.Id == nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "create user",
- "Instance creation waiting: returned id is nil",
- )
- return
- }
-
- // Map response body to schema
- err = mapFields(waitResp, &model, region)
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "Error creating user",
- fmt.Sprintf("Processing API payload: %v", err),
- )
- return
- }
- // Set state to fully populated data
- diags = resp.State.Set(ctx, model)
- resp.Diagnostics.Append(diags...)
- if resp.Diagnostics.HasError() {
- return
- }
- tflog.Info(ctx, "SQLServer Flex user created")
-}
-
-// Read refreshes the Terraform state with the latest data.
-func (r *userResource) Read(
- ctx context.Context,
- req resource.ReadRequest,
- resp *resource.ReadResponse,
-) { // nolint:gocritic // function signature required by Terraform
- var model resourceModel
- diags := req.State.Get(ctx, &model)
- resp.Diagnostics.Append(diags...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- ctx = core.InitProviderContext(ctx)
-
- projectId := model.ProjectId.ValueString()
- instanceId := model.InstanceId.ValueString()
- userId := model.UserId.ValueInt64()
- region := r.providerData.GetRegionWithOverride(model.Region)
- ctx = tflog.SetField(ctx, "project_id", projectId)
- ctx = tflog.SetField(ctx, "instance_id", instanceId)
- ctx = tflog.SetField(ctx, "user_id", userId)
- ctx = tflog.SetField(ctx, "region", region)
-
- recordSetResp, err := r.client.GetUserRequest(ctx, projectId, region, instanceId, userId).Execute()
- if err != nil {
- var oapiErr *oapierror.GenericOpenAPIError
- ok := errors.As(
- err,
- &oapiErr,
- )
- //nolint:errorlint //complaining that error.As should be used to catch wrapped errors, but this error should not be wrapped
- if ok && oapiErr.StatusCode == http.StatusNotFound {
- resp.State.RemoveResource(ctx)
- return
- }
- core.LogAndAddError(ctx, &resp.Diagnostics, "Error reading user", fmt.Sprintf("Calling API: %v", err))
- return
- }
-
- ctx = core.LogResponse(ctx)
-
- // Map response body to schema
- err = mapFields(recordSetResp, &model, region)
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "Error reading user",
- fmt.Sprintf("Processing API payload: %v", err),
- )
- return
- }
-
- // Set data returned by API in identity
- identity := UserResourceIdentityModel{
- ProjectID: types.StringValue(projectId),
- Region: types.StringValue(region),
- InstanceID: types.StringValue(instanceId),
- UserID: types.Int64Value(userId),
- }
- resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- // Set refreshed state
- diags = resp.State.Set(ctx, model)
- resp.Diagnostics.Append(diags...)
- if resp.Diagnostics.HasError() {
- return
- }
- tflog.Info(ctx, "SQLServer Flex user read")
-}
-
-// Update updates the resource and sets the updated Terraform state on success.
-func (r *userResource) Update(
- ctx context.Context,
- _ resource.UpdateRequest,
- resp *resource.UpdateResponse,
-) { // nolint:gocritic // function signature required by Terraform
- // Update shouldn't be called
- core.LogAndAddError(ctx, &resp.Diagnostics, "Error updating user", "User can't be updated")
-}
-
-// Delete deletes the resource and removes the Terraform state on success.
-func (r *userResource) Delete(
- ctx context.Context,
- req resource.DeleteRequest,
- resp *resource.DeleteResponse,
-) { // nolint:gocritic // function signature required by Terraform
- // Retrieve values from plan
- var model resourceModel
- diags := req.State.Get(ctx, &model)
- resp.Diagnostics.Append(diags...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- ctx = core.InitProviderContext(ctx)
-
- projectId := model.ProjectId.ValueString()
- instanceId := model.InstanceId.ValueString()
- userId := model.UserId.ValueInt64()
- region := model.Region.ValueString()
- ctx = tflog.SetField(ctx, "project_id", projectId)
- ctx = tflog.SetField(ctx, "instance_id", instanceId)
- ctx = tflog.SetField(ctx, "user_id", userId)
- ctx = tflog.SetField(ctx, "region", region)
-
- // Delete existing record set
- // err := r.client.DeleteUserRequest(ctx, projectId, region, instanceId, userId).Execute()
- err := r.client.DeleteUserRequestExecute(ctx, projectId, region, instanceId, userId)
- if err != nil {
- var oapiErr *oapierror.GenericOpenAPIError
- ok := errors.As(err, &oapiErr)
- if !ok {
- // TODO err handling
- return
- }
-
- switch oapiErr.StatusCode {
- case http.StatusNotFound:
- resp.State.RemoveResource(ctx)
- return
- // case http.StatusInternalServerError:
- // tflog.Warn(ctx, "[delete user] Wait handler got error 500")
- // return false, nil, nil
- default:
- // TODO err handling
- return
- }
- }
- // Delete existing record set
- _, err = sqlserverflexalphaWait.DeleteUserWaitHandler(ctx, r.client, projectId, region, instanceId, userId).
- WaitWithContext(ctx)
- if err != nil {
- core.LogAndAddError(ctx, &resp.Diagnostics, "User Delete Error", fmt.Sprintf("Calling API: %v", err))
- return
- }
-
- ctx = core.LogResponse(ctx)
-
- resp.State.RemoveResource(ctx)
-
- tflog.Info(ctx, "SQLServer Flex user deleted")
-}
-
-// ImportState imports a resource into the Terraform state on success.
-// The expected format of the resource import identifier is: project_id,zone_id,record_set_id
-func (r *userResource) ImportState(
- ctx context.Context,
- req resource.ImportStateRequest,
- resp *resource.ImportStateResponse,
-) {
- ctx = core.InitProviderContext(ctx)
-
- if req.ID != "" {
- idParts := strings.Split(req.ID, core.Separator)
-
- if len(idParts) != 4 || idParts[0] == "" || idParts[1] == "" || idParts[2] == "" || idParts[3] == "" {
- core.LogAndAddError(
- ctx, &resp.Diagnostics,
- "Error importing user",
- fmt.Sprintf(
- "Expected import identifier with format [project_id],[region],[instance_id],[user_id], got %q",
- req.ID,
- ),
- )
- return
- }
-
- userId, err := strconv.ParseInt(idParts[3], 10, 64)
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "Error importing user",
- fmt.Sprintf("Invalid user_id format: %q. It must be a valid integer.", idParts[3]),
- )
- return
- }
-
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), idParts[0])...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), idParts[1])...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), idParts[2])...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("user_id"), userId)...)
-
- tflog.Info(ctx, "SQLServer Flex user state imported")
-
- return
- }
-
- // If no ID is provided, attempt to read identity attributes from the import configuration
- var identityData UserResourceIdentityModel
- resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- projectId := identityData.ProjectID.ValueString()
- region := identityData.Region.ValueString()
- instanceId := identityData.InstanceID.ValueString()
- userId := identityData.UserID.ValueInt64()
-
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), projectId)...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), region)...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), instanceId)...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("user_id"), userId)...)
-
- core.LogAndAddWarning(
- ctx,
- &resp.Diagnostics,
- "SQLServer Flex user imported with empty password",
- "The user password is not imported as it is only available upon creation of a new user. The password field will be empty.",
- )
- tflog.Info(ctx, "SQLServer Flex user state imported")
-}
diff --git a/stackit/internal/services/sqlserverflexalpha/utils/util.go b/stackit/internal/services/sqlserverflexalpha/utils/util.go
deleted file mode 100644
index 7fbf0901..00000000
--- a/stackit/internal/services/sqlserverflexalpha/utils/util.go
+++ /dev/null
@@ -1,48 +0,0 @@
-package utils
-
-import (
- "context"
- "fmt"
-
- sqlserverflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha"
-
- "github.com/hashicorp/terraform-plugin-framework/diag"
- "github.com/stackitcloud/stackit-sdk-go/core/config"
-
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
-)
-
-func ConfigureClient(
- ctx context.Context,
- providerData *core.ProviderData,
- diags *diag.Diagnostics,
-) *sqlserverflex.APIClient {
- apiClientConfigOptions := []config.ConfigurationOption{
- config.WithCustomAuth(providerData.RoundTripper),
- utils.UserAgentConfigOption(providerData.Version),
- }
- if providerData.SQLServerFlexCustomEndpoint != "" {
- apiClientConfigOptions = append(
- apiClientConfigOptions,
- config.WithEndpoint(providerData.SQLServerFlexCustomEndpoint),
- )
- } else {
- apiClientConfigOptions = append(apiClientConfigOptions, config.WithRegion(providerData.GetRegion()))
- }
- apiClient, err := sqlserverflex.NewAPIClient(apiClientConfigOptions...)
- if err != nil {
- core.LogAndAddError(
- ctx,
- diags,
- "Error configuring API client",
- fmt.Sprintf(
- "Configuring client: %v. This is an error related to the provider configuration, not to the resource configuration",
- err,
- ),
- )
- return nil
- }
-
- return apiClient
-}
diff --git a/stackit/internal/services/sqlserverflexalpha/utils/util_test.go b/stackit/internal/services/sqlserverflexalpha/utils/util_test.go
deleted file mode 100644
index 91f90030..00000000
--- a/stackit/internal/services/sqlserverflexalpha/utils/util_test.go
+++ /dev/null
@@ -1,98 +0,0 @@
-package utils
-
-import (
- "context"
- "os"
- "reflect"
- "testing"
-
- "github.com/hashicorp/terraform-plugin-framework/diag"
- sdkClients "github.com/stackitcloud/stackit-sdk-go/core/clients"
- "github.com/stackitcloud/stackit-sdk-go/core/config"
-
- sqlserverflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha"
-
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
-)
-
-const (
- testVersion = "1.2.3"
- testCustomEndpoint = "https://sqlserverflex-custom-endpoint.api.stackit.cloud"
-)
-
-func TestConfigureClient(t *testing.T) {
- /* mock authentication by setting service account token env variable */
- os.Clearenv()
- err := os.Setenv(sdkClients.ServiceAccountToken, "mock-val")
- if err != nil {
- t.Errorf("error setting env variable: %v", err)
- }
-
- type args struct {
- providerData *core.ProviderData
- }
- tests := []struct {
- name string
- args args
- wantErr bool
- expected *sqlserverflex.APIClient
- }{
- {
- name: "default endpoint",
- args: args{
- providerData: &core.ProviderData{
- Version: testVersion,
- },
- },
- expected: func() *sqlserverflex.APIClient {
- apiClient, err := sqlserverflex.NewAPIClient(
- config.WithRegion("eu01"),
- utils.UserAgentConfigOption(testVersion),
- )
- if err != nil {
- t.Errorf("error configuring client: %v", err)
- }
- return apiClient
- }(),
- wantErr: false,
- },
- {
- name: "custom endpoint",
- args: args{
- providerData: &core.ProviderData{
- Version: testVersion,
- SQLServerFlexCustomEndpoint: testCustomEndpoint,
- },
- },
- expected: func() *sqlserverflex.APIClient {
- apiClient, err := sqlserverflex.NewAPIClient(
- utils.UserAgentConfigOption(testVersion),
- config.WithEndpoint(testCustomEndpoint),
- )
- if err != nil {
- t.Errorf("error configuring client: %v", err)
- }
- return apiClient
- }(),
- wantErr: false,
- },
- }
- for _, tt := range tests {
- t.Run(
- tt.name, func(t *testing.T) {
- ctx := context.Background()
- diags := diag.Diagnostics{}
-
- actual := ConfigureClient(ctx, tt.args.providerData, &diags)
- if diags.HasError() != tt.wantErr {
- t.Errorf("ConfigureClient() error = %v, want %v", diags.HasError(), tt.wantErr)
- }
-
- if !reflect.DeepEqual(actual, tt.expected) {
- t.Errorf("ConfigureClient() = %v, want %v", actual, tt.expected)
- }
- },
- )
- }
-}
diff --git a/stackit/internal/services/sqlserverflexalpha/versions/datasources_gen/version_data_source_gen.go b/stackit/internal/services/sqlserverflexalpha/versions/datasources_gen/version_data_source_gen.go
deleted file mode 100644
index cb9008f1..00000000
--- a/stackit/internal/services/sqlserverflexalpha/versions/datasources_gen/version_data_source_gen.go
+++ /dev/null
@@ -1,569 +0,0 @@
-// Code generated by terraform-plugin-framework-generator DO NOT EDIT.
-
-package sqlserverflexalpha
-
-import (
- "context"
- "fmt"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
- "github.com/hashicorp/terraform-plugin-framework/attr"
- "github.com/hashicorp/terraform-plugin-framework/diag"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
- "github.com/hashicorp/terraform-plugin-framework/types"
- "github.com/hashicorp/terraform-plugin-framework/types/basetypes"
- "github.com/hashicorp/terraform-plugin-go/tftypes"
- "strings"
-
- "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
-)
-
-func VersionDataSourceSchema(ctx context.Context) schema.Schema {
- return schema.Schema{
- Attributes: map[string]schema.Attribute{
- "project_id": schema.StringAttribute{
- Required: true,
- Description: "The STACKIT project ID.",
- MarkdownDescription: "The STACKIT project ID.",
- },
- "region": schema.StringAttribute{
- Required: true,
- Description: "The region which should be addressed",
- MarkdownDescription: "The region which should be addressed",
- Validators: []validator.String{
- stringvalidator.OneOf(
- "eu01",
- ),
- },
- },
- "versions": schema.ListNestedAttribute{
- NestedObject: schema.NestedAttributeObject{
- Attributes: map[string]schema.Attribute{
- "beta": schema.BoolAttribute{
- Computed: true,
- Description: "Flag if the version is a beta version. If set the version may contain bugs and is not fully tested.",
- MarkdownDescription: "Flag if the version is a beta version. If set the version may contain bugs and is not fully tested.",
- },
- "deprecated": schema.StringAttribute{
- Computed: true,
- Description: "Timestamp in RFC3339 format which says when the version will no longer be supported by STACKIT.",
- MarkdownDescription: "Timestamp in RFC3339 format which says when the version will no longer be supported by STACKIT.",
- },
- "recommend": schema.BoolAttribute{
- Computed: true,
- Description: "Flag if the version is recommend by the STACKIT Team.",
- MarkdownDescription: "Flag if the version is recommend by the STACKIT Team.",
- },
- "version": schema.StringAttribute{
- Computed: true,
- Description: "The sqlserver version used for the instance.",
- MarkdownDescription: "The sqlserver version used for the instance.",
- },
- },
- CustomType: VersionsType{
- ObjectType: types.ObjectType{
- AttrTypes: VersionsValue{}.AttributeTypes(ctx),
- },
- },
- },
- Computed: true,
- Description: "A list containing available sqlserver versions.",
- MarkdownDescription: "A list containing available sqlserver versions.",
- },
- },
- }
-}
-
-type VersionModel struct {
- ProjectId types.String `tfsdk:"project_id"`
- Region types.String `tfsdk:"region"`
- Versions types.List `tfsdk:"versions"`
-}
-
-var _ basetypes.ObjectTypable = VersionsType{}
-
-type VersionsType struct {
- basetypes.ObjectType
-}
-
-func (t VersionsType) Equal(o attr.Type) bool {
- other, ok := o.(VersionsType)
-
- if !ok {
- return false
- }
-
- return t.ObjectType.Equal(other.ObjectType)
-}
-
-func (t VersionsType) String() string {
- return "VersionsType"
-}
-
-func (t VersionsType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- attributes := in.Attributes()
-
- betaAttribute, ok := attributes["beta"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `beta is missing from object`)
-
- return nil, diags
- }
-
- betaVal, ok := betaAttribute.(basetypes.BoolValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`beta expected to be basetypes.BoolValue, was: %T`, betaAttribute))
- }
-
- deprecatedAttribute, ok := attributes["deprecated"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `deprecated is missing from object`)
-
- return nil, diags
- }
-
- deprecatedVal, ok := deprecatedAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`deprecated expected to be basetypes.StringValue, was: %T`, deprecatedAttribute))
- }
-
- recommendAttribute, ok := attributes["recommend"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `recommend is missing from object`)
-
- return nil, diags
- }
-
- recommendVal, ok := recommendAttribute.(basetypes.BoolValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`recommend expected to be basetypes.BoolValue, was: %T`, recommendAttribute))
- }
-
- versionAttribute, ok := attributes["version"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `version is missing from object`)
-
- return nil, diags
- }
-
- versionVal, ok := versionAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`version expected to be basetypes.StringValue, was: %T`, versionAttribute))
- }
-
- if diags.HasError() {
- return nil, diags
- }
-
- return VersionsValue{
- Beta: betaVal,
- Deprecated: deprecatedVal,
- Recommend: recommendVal,
- Version: versionVal,
- state: attr.ValueStateKnown,
- }, diags
-}
-
-func NewVersionsValueNull() VersionsValue {
- return VersionsValue{
- state: attr.ValueStateNull,
- }
-}
-
-func NewVersionsValueUnknown() VersionsValue {
- return VersionsValue{
- state: attr.ValueStateUnknown,
- }
-}
-
-func NewVersionsValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (VersionsValue, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521
- ctx := context.Background()
-
- for name, attributeType := range attributeTypes {
- attribute, ok := attributes[name]
-
- if !ok {
- diags.AddError(
- "Missing VersionsValue Attribute Value",
- "While creating a VersionsValue value, a missing attribute value was detected. "+
- "A VersionsValue must contain values for all attributes, even if null or unknown. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("VersionsValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()),
- )
-
- continue
- }
-
- if !attributeType.Equal(attribute.Type(ctx)) {
- diags.AddError(
- "Invalid VersionsValue Attribute Type",
- "While creating a VersionsValue value, an invalid attribute value was detected. "+
- "A VersionsValue must use a matching attribute type for the value. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("VersionsValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+
- fmt.Sprintf("VersionsValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)),
- )
- }
- }
-
- for name := range attributes {
- _, ok := attributeTypes[name]
-
- if !ok {
- diags.AddError(
- "Extra VersionsValue Attribute Value",
- "While creating a VersionsValue value, an extra attribute value was detected. "+
- "A VersionsValue must not contain values beyond the expected attribute types. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("Extra VersionsValue Attribute Name: %s", name),
- )
- }
- }
-
- if diags.HasError() {
- return NewVersionsValueUnknown(), diags
- }
-
- betaAttribute, ok := attributes["beta"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `beta is missing from object`)
-
- return NewVersionsValueUnknown(), diags
- }
-
- betaVal, ok := betaAttribute.(basetypes.BoolValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`beta expected to be basetypes.BoolValue, was: %T`, betaAttribute))
- }
-
- deprecatedAttribute, ok := attributes["deprecated"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `deprecated is missing from object`)
-
- return NewVersionsValueUnknown(), diags
- }
-
- deprecatedVal, ok := deprecatedAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`deprecated expected to be basetypes.StringValue, was: %T`, deprecatedAttribute))
- }
-
- recommendAttribute, ok := attributes["recommend"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `recommend is missing from object`)
-
- return NewVersionsValueUnknown(), diags
- }
-
- recommendVal, ok := recommendAttribute.(basetypes.BoolValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`recommend expected to be basetypes.BoolValue, was: %T`, recommendAttribute))
- }
-
- versionAttribute, ok := attributes["version"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `version is missing from object`)
-
- return NewVersionsValueUnknown(), diags
- }
-
- versionVal, ok := versionAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`version expected to be basetypes.StringValue, was: %T`, versionAttribute))
- }
-
- if diags.HasError() {
- return NewVersionsValueUnknown(), diags
- }
-
- return VersionsValue{
- Beta: betaVal,
- Deprecated: deprecatedVal,
- Recommend: recommendVal,
- Version: versionVal,
- state: attr.ValueStateKnown,
- }, diags
-}
-
-func NewVersionsValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) VersionsValue {
- object, diags := NewVersionsValue(attributeTypes, attributes)
-
- if diags.HasError() {
- // This could potentially be added to the diag package.
- diagsStrings := make([]string, 0, len(diags))
-
- for _, diagnostic := range diags {
- diagsStrings = append(diagsStrings, fmt.Sprintf(
- "%s | %s | %s",
- diagnostic.Severity(),
- diagnostic.Summary(),
- diagnostic.Detail()))
- }
-
- panic("NewVersionsValueMust received error(s): " + strings.Join(diagsStrings, "\n"))
- }
-
- return object
-}
-
-func (t VersionsType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) {
- if in.Type() == nil {
- return NewVersionsValueNull(), nil
- }
-
- if !in.Type().Equal(t.TerraformType(ctx)) {
- return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type())
- }
-
- if !in.IsKnown() {
- return NewVersionsValueUnknown(), nil
- }
-
- if in.IsNull() {
- return NewVersionsValueNull(), nil
- }
-
- attributes := map[string]attr.Value{}
-
- val := map[string]tftypes.Value{}
-
- err := in.As(&val)
-
- if err != nil {
- return nil, err
- }
-
- for k, v := range val {
- a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v)
-
- if err != nil {
- return nil, err
- }
-
- attributes[k] = a
- }
-
- return NewVersionsValueMust(VersionsValue{}.AttributeTypes(ctx), attributes), nil
-}
-
-func (t VersionsType) ValueType(ctx context.Context) attr.Value {
- return VersionsValue{}
-}
-
-var _ basetypes.ObjectValuable = VersionsValue{}
-
-type VersionsValue struct {
- Beta basetypes.BoolValue `tfsdk:"beta"`
- Deprecated basetypes.StringValue `tfsdk:"deprecated"`
- Recommend basetypes.BoolValue `tfsdk:"recommend"`
- Version basetypes.StringValue `tfsdk:"version"`
- state attr.ValueState
-}
-
-func (v VersionsValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
- attrTypes := make(map[string]tftypes.Type, 4)
-
- var val tftypes.Value
- var err error
-
- attrTypes["beta"] = basetypes.BoolType{}.TerraformType(ctx)
- attrTypes["deprecated"] = basetypes.StringType{}.TerraformType(ctx)
- attrTypes["recommend"] = basetypes.BoolType{}.TerraformType(ctx)
- attrTypes["version"] = basetypes.StringType{}.TerraformType(ctx)
-
- objectType := tftypes.Object{AttributeTypes: attrTypes}
-
- switch v.state {
- case attr.ValueStateKnown:
- vals := make(map[string]tftypes.Value, 4)
-
- val, err = v.Beta.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["beta"] = val
-
- val, err = v.Deprecated.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["deprecated"] = val
-
- val, err = v.Recommend.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["recommend"] = val
-
- val, err = v.Version.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["version"] = val
-
- if err := tftypes.ValidateValue(objectType, vals); err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- return tftypes.NewValue(objectType, vals), nil
- case attr.ValueStateNull:
- return tftypes.NewValue(objectType, nil), nil
- case attr.ValueStateUnknown:
- return tftypes.NewValue(objectType, tftypes.UnknownValue), nil
- default:
- panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state))
- }
-}
-
-func (v VersionsValue) IsNull() bool {
- return v.state == attr.ValueStateNull
-}
-
-func (v VersionsValue) IsUnknown() bool {
- return v.state == attr.ValueStateUnknown
-}
-
-func (v VersionsValue) String() string {
- return "VersionsValue"
-}
-
-func (v VersionsValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- attributeTypes := map[string]attr.Type{
- "beta": basetypes.BoolType{},
- "deprecated": basetypes.StringType{},
- "recommend": basetypes.BoolType{},
- "version": basetypes.StringType{},
- }
-
- if v.IsNull() {
- return types.ObjectNull(attributeTypes), diags
- }
-
- if v.IsUnknown() {
- return types.ObjectUnknown(attributeTypes), diags
- }
-
- objVal, diags := types.ObjectValue(
- attributeTypes,
- map[string]attr.Value{
- "beta": v.Beta,
- "deprecated": v.Deprecated,
- "recommend": v.Recommend,
- "version": v.Version,
- })
-
- return objVal, diags
-}
-
-func (v VersionsValue) Equal(o attr.Value) bool {
- other, ok := o.(VersionsValue)
-
- if !ok {
- return false
- }
-
- if v.state != other.state {
- return false
- }
-
- if v.state != attr.ValueStateKnown {
- return true
- }
-
- if !v.Beta.Equal(other.Beta) {
- return false
- }
-
- if !v.Deprecated.Equal(other.Deprecated) {
- return false
- }
-
- if !v.Recommend.Equal(other.Recommend) {
- return false
- }
-
- if !v.Version.Equal(other.Version) {
- return false
- }
-
- return true
-}
-
-func (v VersionsValue) Type(ctx context.Context) attr.Type {
- return VersionsType{
- basetypes.ObjectType{
- AttrTypes: v.AttributeTypes(ctx),
- },
- }
-}
-
-func (v VersionsValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
- return map[string]attr.Type{
- "beta": basetypes.BoolType{},
- "deprecated": basetypes.StringType{},
- "recommend": basetypes.BoolType{},
- "version": basetypes.StringType{},
- }
-}
diff --git a/stackit/internal/services/sqlserverflexbeta/database/datasource.go b/stackit/internal/services/sqlserverflexbeta/database/datasource.go
deleted file mode 100644
index c6fa31bf..00000000
--- a/stackit/internal/services/sqlserverflexbeta/database/datasource.go
+++ /dev/null
@@ -1,174 +0,0 @@
-package sqlserverflexbeta
-
-import (
- "context"
- "fmt"
- "net/http"
-
- "github.com/hashicorp/terraform-plugin-framework/datasource"
- "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/diag"
- "github.com/hashicorp/terraform-plugin-framework/types"
- "github.com/hashicorp/terraform-plugin-log/tflog"
- "github.com/stackitcloud/stackit-sdk-go/core/config"
-
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
-
- sqlserverflexbetaPkg "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta"
- sqlserverflexbetaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/database/datasources_gen"
-)
-
-var _ datasource.DataSource = (*databaseDataSource)(nil)
-
-const errorPrefix = "[Sqlserverflexbeta - Database]"
-
-func NewDatabaseDataSource() datasource.DataSource {
- return &databaseDataSource{}
-}
-
-type dataSourceModel struct {
- sqlserverflexbetaGen.DatabaseModel
- TerraformId types.String `tfsdk:"id"`
-}
-
-type databaseDataSource struct {
- client *sqlserverflexbetaPkg.APIClient
- providerData core.ProviderData
-}
-
-func (d *databaseDataSource) Metadata(
- _ context.Context,
- req datasource.MetadataRequest,
- resp *datasource.MetadataResponse,
-) {
- resp.TypeName = req.ProviderTypeName + "_sqlserverflexbeta_database"
-}
-
-func (d *databaseDataSource) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) {
- resp.Schema = sqlserverflexbetaGen.DatabaseDataSourceSchema(ctx)
- resp.Schema.Attributes["id"] = schema.StringAttribute{
- Computed: true,
- Description: "The terraform internal identifier.",
- MarkdownDescription: "The terraform internal identifier.",
- }
-}
-
-// Configure adds the provider configured client to the data source.
-func (d *databaseDataSource) Configure(
- ctx context.Context,
- req datasource.ConfigureRequest,
- resp *datasource.ConfigureResponse,
-) {
- var ok bool
- d.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics)
- if !ok {
- return
- }
-
- apiClientConfigOptions := []config.ConfigurationOption{
- config.WithCustomAuth(d.providerData.RoundTripper),
- utils.UserAgentConfigOption(d.providerData.Version),
- }
- if d.providerData.SQLServerFlexCustomEndpoint != "" {
- apiClientConfigOptions = append(
- apiClientConfigOptions,
- config.WithEndpoint(d.providerData.SQLServerFlexCustomEndpoint),
- )
- } else {
- apiClientConfigOptions = append(
- apiClientConfigOptions,
- config.WithRegion(d.providerData.GetRegion()),
- )
- }
- apiClient, err := sqlserverflexbetaPkg.NewAPIClient(apiClientConfigOptions...)
- if err != nil {
- resp.Diagnostics.AddError(
- "Error configuring API client",
- fmt.Sprintf(
- "Configuring client: %v. This is an error related to the provider configuration, not to the resource configuration",
- err,
- ),
- )
- return
- }
- d.client = apiClient
- tflog.Info(ctx, fmt.Sprintf("%s client configured", errorPrefix))
-}
-
-func (d *databaseDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
- var data dataSourceModel
- // Read Terraform configuration data into the model
- resp.Diagnostics.Append(req.Config.Get(ctx, &data)...)
-
- if resp.Diagnostics.HasError() {
- return
- }
-
- ctx = core.InitProviderContext(ctx)
-
- // Extract identifiers from the plan
- projectId := data.ProjectId.ValueString()
- region := d.providerData.GetRegionWithOverride(data.Region)
- instanceId := data.InstanceId.ValueString()
-
- ctx = tflog.SetField(ctx, "project_id", projectId)
- ctx = tflog.SetField(ctx, "region", region)
- ctx = tflog.SetField(ctx, "instance_id", instanceId)
-
- databaseName := data.DatabaseName.ValueString()
-
- databaseResp, err := d.client.GetDatabaseRequest(ctx, projectId, region, instanceId, databaseName).Execute()
- if err != nil {
- handleReadError(ctx, &resp.Diagnostics, err, projectId, instanceId)
- resp.State.RemoveResource(ctx)
- return
- }
-
- ctx = core.LogResponse(ctx)
- // Map response body to schema and populate Computed attribute values
- err = mapFields(databaseResp, &data, region)
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "Error reading database",
- fmt.Sprintf("Processing API payload: %v", err),
- )
- return
- }
-
- // Save data into Terraform state
- resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
-
- tflog.Info(ctx, "SQL Server Flex beta database read")
-}
-
-// handleReadError centralizes API error handling for the Read operation.
-func handleReadError(ctx context.Context, diags *diag.Diagnostics, err error, projectId, instanceId string) {
- utils.LogError(
- ctx,
- diags,
- err,
- "Reading database",
- fmt.Sprintf(
- "Could not retrieve database for instance %q in project %q.",
- instanceId,
- projectId,
- ),
- map[int]string{
- http.StatusBadRequest: fmt.Sprintf(
- "Invalid request parameters for project %q and instance %q.",
- projectId,
- instanceId,
- ),
- http.StatusNotFound: fmt.Sprintf(
- "Database, instance %q, or project %q not found.",
- instanceId,
- projectId,
- ),
- http.StatusForbidden: fmt.Sprintf("Forbidden access to project %q.", projectId),
- },
- )
-}
diff --git a/stackit/internal/services/sqlserverflexbeta/database/mapper.go b/stackit/internal/services/sqlserverflexbeta/database/mapper.go
deleted file mode 100644
index 43a4344f..00000000
--- a/stackit/internal/services/sqlserverflexbeta/database/mapper.go
+++ /dev/null
@@ -1,103 +0,0 @@
-package sqlserverflexbeta
-
-import (
- "fmt"
-
- "github.com/hashicorp/terraform-plugin-framework/types"
-
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
-)
-
-// mapFields maps fields from a ListDatabase API response to a resourceModel for the data source.
-func mapFields(source *sqlserverflexbeta.GetDatabaseResponse, model *dataSourceModel, region string) error {
- if source == nil {
- return fmt.Errorf("response is nil")
- }
- if source.Id == nil || *source.Id == 0 {
- return fmt.Errorf("id not present")
- }
- if model == nil {
- return fmt.Errorf("model given is nil")
- }
-
- var databaseId int64
- if model.Id.ValueInt64() != 0 {
- databaseId = model.Id.ValueInt64()
- } else if source.Id != nil {
- databaseId = *source.Id
- } else {
- return fmt.Errorf("database id not present")
- }
-
- model.Id = types.Int64Value(databaseId)
- model.DatabaseName = types.StringValue(source.GetName())
- model.Name = types.StringValue(source.GetName())
- model.Owner = types.StringValue(source.GetOwner())
- model.Region = types.StringValue(region)
- model.ProjectId = types.StringValue(model.ProjectId.ValueString())
- model.InstanceId = types.StringValue(model.InstanceId.ValueString())
- model.CompatibilityLevel = types.Int64Value(source.GetCompatibilityLevel())
- model.CollationName = types.StringValue(source.GetCollationName())
-
- model.TerraformId = utils.BuildInternalTerraformId(
- model.ProjectId.ValueString(),
- region,
- model.InstanceId.ValueString(),
- model.DatabaseName.ValueString(),
- )
-
- return nil
-}
-
-// mapResourceFields maps fields from a ListDatabase API response to a resourceModel for the resource.
-func mapResourceFields(source *sqlserverflexbeta.GetDatabaseResponse, model *resourceModel, region string) error {
- if source == nil {
- return fmt.Errorf("response is nil")
- }
- if source.Id == nil || *source.Id == 0 {
- return fmt.Errorf("id not present")
- }
- if model == nil {
- return fmt.Errorf("model input is nil")
- }
-
- var databaseId int64
- if model.Id.ValueInt64() != 0 {
- databaseId = model.Id.ValueInt64()
- } else if source.Id != nil {
- databaseId = *source.Id
- } else {
- return fmt.Errorf("database id not present")
- }
-
- model.Id = types.Int64Value(databaseId)
- model.DatabaseName = types.StringValue(source.GetName())
- model.Name = types.StringValue(source.GetName())
- model.Owner = types.StringValue(source.GetOwner())
- model.Region = types.StringValue(region)
- model.ProjectId = types.StringValue(model.ProjectId.ValueString())
- model.InstanceId = types.StringValue(model.InstanceId.ValueString())
-
- model.Compatibility = types.Int64Value(source.GetCompatibilityLevel())
- model.CompatibilityLevel = types.Int64Value(source.GetCompatibilityLevel())
-
- model.Collation = types.StringValue(source.GetCollationName()) // it does not come back from api
- model.CollationName = types.StringValue(source.GetCollationName())
-
- return nil
-}
-
-// toCreatePayload converts the resource model to an API create payload.
-func toCreatePayload(model *resourceModel) (*sqlserverflexbeta.CreateDatabaseRequestPayload, error) {
- if model == nil {
- return nil, fmt.Errorf("nil model")
- }
-
- return &sqlserverflexbeta.CreateDatabaseRequestPayload{
- Name: model.Name.ValueStringPointer(),
- Owner: model.Owner.ValueStringPointer(),
- Collation: model.Collation.ValueStringPointer(),
- Compatibility: model.Compatibility.ValueInt64Pointer(),
- }, nil
-}
diff --git a/stackit/internal/services/sqlserverflexbeta/database/mapper_test.go b/stackit/internal/services/sqlserverflexbeta/database/mapper_test.go
deleted file mode 100644
index f865f22f..00000000
--- a/stackit/internal/services/sqlserverflexbeta/database/mapper_test.go
+++ /dev/null
@@ -1,232 +0,0 @@
-package sqlserverflexbeta
-
-import (
- "testing"
-
- "github.com/google/go-cmp/cmp"
- "github.com/hashicorp/terraform-plugin-framework/types"
- "github.com/stackitcloud/stackit-sdk-go/core/utils"
-
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta"
- datasource "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/database/datasources_gen"
-)
-
-func TestMapFields(t *testing.T) {
- type given struct {
- source *sqlserverflexbeta.GetDatabaseResponse
- model *dataSourceModel
- region string
- }
- type expected struct {
- model *dataSourceModel
- err bool
- }
-
- testcases := []struct {
- name string
- given given
- expected expected
- }{
- {
- name: "should map fields correctly",
- given: given{
- source: &sqlserverflexbeta.GetDatabaseResponse{
- Id: utils.Ptr(int64(1)),
- Name: utils.Ptr("my-db"),
- CollationName: utils.Ptr("collation"),
- CompatibilityLevel: utils.Ptr(int64(150)),
- Owner: utils.Ptr("my-owner"),
- },
- model: &dataSourceModel{
- DatabaseModel: datasource.DatabaseModel{
- ProjectId: types.StringValue("my-project"),
- InstanceId: types.StringValue("my-instance"),
- },
- },
- region: "eu01",
- },
- expected: expected{
- model: &dataSourceModel{
- DatabaseModel: datasource.DatabaseModel{
- Id: types.Int64Value(1),
- Name: types.StringValue("my-db"),
- DatabaseName: types.StringValue("my-db"),
- Owner: types.StringValue("my-owner"),
- Region: types.StringValue("eu01"),
- InstanceId: types.StringValue("my-instance"),
- ProjectId: types.StringValue("my-project"),
- CompatibilityLevel: types.Int64Value(150),
- CollationName: types.StringValue("collation"),
- },
- TerraformId: types.StringValue("my-project,eu01,my-instance,my-db"),
- },
- },
- },
- {
- name: "should fail on nil source",
- given: given{
- source: nil,
- model: &dataSourceModel{},
- },
- expected: expected{err: true},
- },
- {
- name: "should fail on nil source ID",
- given: given{
- source: &sqlserverflexbeta.GetDatabaseResponse{Id: nil},
- model: &dataSourceModel{},
- },
- expected: expected{err: true},
- },
- {
- name: "should fail on nil model",
- given: given{
- source: &sqlserverflexbeta.GetDatabaseResponse{Id: utils.Ptr(int64(1))},
- model: nil,
- },
- expected: expected{err: true},
- },
- }
-
- for _, tc := range testcases {
- t.Run(
- tc.name, func(t *testing.T) {
- err := mapFields(tc.given.source, tc.given.model, tc.given.region)
- if (err != nil) != tc.expected.err {
- t.Fatalf("expected error: %v, got: %v", tc.expected.err, err)
- }
- if err == nil {
- if diff := cmp.Diff(tc.expected.model, tc.given.model); diff != "" {
- t.Errorf("model mismatch (-want +got):\n%s", diff)
- }
- }
- },
- )
- }
-}
-
-func TestMapResourceFields(t *testing.T) {
- type given struct {
- source *sqlserverflexbeta.GetDatabaseResponse
- model *resourceModel
- region string
- }
- type expected struct {
- model *resourceModel
- err bool
- }
-
- testcases := []struct {
- name string
- given given
- expected expected
- }{
- {
- name: "should map fields correctly",
- given: given{
- source: &sqlserverflexbeta.GetDatabaseResponse{
- Id: utils.Ptr(int64(1)),
- Name: utils.Ptr("my-db"),
- Owner: utils.Ptr("my-owner"),
- },
- model: &resourceModel{
- ProjectId: types.StringValue("my-project"),
- InstanceId: types.StringValue("my-instance"),
- },
- region: "eu01",
- },
- expected: expected{
- model: &resourceModel{
- Id: types.Int64Value(1),
- Name: types.StringValue("my-db"),
- Compatibility: types.Int64Value(0),
- CompatibilityLevel: types.Int64Value(0),
- Collation: types.StringValue(""),
- CollationName: types.StringValue(""),
- DatabaseName: types.StringValue("my-db"),
- InstanceId: types.StringValue("my-instance"),
- ProjectId: types.StringValue("my-project"),
- Region: types.StringValue("eu01"),
- Owner: types.StringValue("my-owner"),
- },
- },
- },
- {
- name: "should fail on nil source",
- given: given{
- source: nil,
- model: &resourceModel{},
- },
- expected: expected{err: true},
- },
- }
-
- for _, tc := range testcases {
- t.Run(
- tc.name, func(t *testing.T) {
- err := mapResourceFields(tc.given.source, tc.given.model, tc.given.region)
- if (err != nil) != tc.expected.err {
- t.Fatalf("expected error: %v, got: %v", tc.expected.err, err)
- }
- if err == nil {
- if diff := cmp.Diff(tc.expected.model, tc.given.model); diff != "" {
- t.Errorf("model mismatch (-want +got):\n%s", diff)
- }
- }
- },
- )
- }
-}
-
-func TestToCreatePayload(t *testing.T) {
- type given struct {
- model *resourceModel
- }
- type expected struct {
- payload *sqlserverflexbeta.CreateDatabaseRequestPayload
- err bool
- }
-
- testcases := []struct {
- name string
- given given
- expected expected
- }{
- {
- name: "should convert model to payload",
- given: given{
- model: &resourceModel{
- Name: types.StringValue("my-db"),
- Owner: types.StringValue("my-owner"),
- },
- },
- expected: expected{
- payload: &sqlserverflexbeta.CreateDatabaseRequestPayload{
- Name: utils.Ptr("my-db"),
- Owner: utils.Ptr("my-owner"),
- },
- },
- },
- {
- name: "should fail on nil model",
- given: given{model: nil},
- expected: expected{err: true},
- },
- }
-
- for _, tc := range testcases {
- t.Run(
- tc.name, func(t *testing.T) {
- actual, err := toCreatePayload(tc.given.model)
- if (err != nil) != tc.expected.err {
- t.Fatalf("expected error: %v, got: %v", tc.expected.err, err)
- }
- if err == nil {
- if diff := cmp.Diff(tc.expected.payload, actual); diff != "" {
- t.Errorf("payload mismatch (-want +got):\n%s", diff)
- }
- }
- },
- )
- }
-}
diff --git a/stackit/internal/services/sqlserverflexbeta/database/planModifiers.yaml b/stackit/internal/services/sqlserverflexbeta/database/planModifiers.yaml
deleted file mode 100644
index 08d7e6cf..00000000
--- a/stackit/internal/services/sqlserverflexbeta/database/planModifiers.yaml
+++ /dev/null
@@ -1,56 +0,0 @@
-fields:
- - name: 'id'
- modifiers:
- - 'UseStateForUnknown'
- - 'RequiresReplace'
-
- - name: 'instance_id'
- validators:
- - validate.NoSeparator
- - validate.UUID
- modifiers:
- - 'RequiresReplace'
-
- - name: 'project_id'
- validators:
- - validate.NoSeparator
- - validate.UUID
- modifiers:
- - 'RequiresReplace'
-
- - name: 'region'
- modifiers:
- - 'RequiresReplace'
-
- - name: 'name'
- modifiers:
- - 'RequiresReplace'
-
- - name: 'collation'
- modifiers:
- - 'RequiresReplace'
-
- - name: 'owner'
- modifiers:
- - 'UseStateForUnknown'
- - 'RequiresReplace'
-
- - name: 'database_name'
- modifiers:
- - 'UseStateForUnknown'
- - 'RequiresReplace'
-
- - name: 'collation_name'
- modifiers:
- - 'RequiresReplace'
- - 'UseStateForUnknown'
-
- - name: 'compatibility'
- modifiers:
- - 'UseStateForUnknown'
- - 'RequiresReplace'
-
- - name: 'compatibility_level'
- modifiers:
- - 'UseStateForUnknown'
- - 'RequiresReplace'
diff --git a/stackit/internal/services/sqlserverflexbeta/database/resource.go b/stackit/internal/services/sqlserverflexbeta/database/resource.go
deleted file mode 100644
index 9862ca57..00000000
--- a/stackit/internal/services/sqlserverflexbeta/database/resource.go
+++ /dev/null
@@ -1,559 +0,0 @@
-package sqlserverflexbeta
-
-import (
- "context"
- _ "embed"
- "errors"
- "fmt"
- "net/http"
- "strings"
- "time"
-
- "github.com/hashicorp/terraform-plugin-framework/path"
- "github.com/hashicorp/terraform-plugin-framework/resource"
- "github.com/hashicorp/terraform-plugin-framework/resource/identityschema"
- "github.com/hashicorp/terraform-plugin-framework/types"
- "github.com/hashicorp/terraform-plugin-log/tflog"
- "github.com/stackitcloud/stackit-sdk-go/core/config"
- "github.com/stackitcloud/stackit-sdk-go/core/oapierror"
-
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
- wait "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/wait/sqlserverflexbeta"
-
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
-
- sqlserverflexbetaResGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/database/resources_gen"
-)
-
-var (
- _ resource.Resource = &databaseResource{}
- _ resource.ResourceWithConfigure = &databaseResource{}
- _ resource.ResourceWithImportState = &databaseResource{}
- _ resource.ResourceWithModifyPlan = &databaseResource{}
- _ resource.ResourceWithIdentity = &databaseResource{}
-
- // Define errors
- errDatabaseNotFound = errors.New("database not found")
-)
-
-func NewDatabaseResource() resource.Resource {
- return &databaseResource{}
-}
-
-// resourceModel describes the resource data model.
-type resourceModel = sqlserverflexbetaResGen.DatabaseModel
-
-type databaseResource struct {
- client *sqlserverflexbeta.APIClient
- providerData core.ProviderData
-}
-
-type DatabaseResourceIdentityModel struct {
- ProjectID types.String `tfsdk:"project_id"`
- Region types.String `tfsdk:"region"`
- InstanceID types.String `tfsdk:"instance_id"`
- DatabaseName types.String `tfsdk:"database_name"`
-}
-
-func (r *databaseResource) Metadata(
- _ context.Context,
- req resource.MetadataRequest,
- resp *resource.MetadataResponse,
-) {
- resp.TypeName = req.ProviderTypeName + "_sqlserverflexbeta_database"
-}
-
-//go:embed planModifiers.yaml
-var modifiersFileByte []byte
-
-func (r *databaseResource) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) {
- s := sqlserverflexbetaResGen.DatabaseResourceSchema(ctx)
-
- fields, err := utils.ReadModifiersConfig(modifiersFileByte)
- if err != nil {
- resp.Diagnostics.AddError("error during read modifiers config file", err.Error())
- return
- }
-
- err = utils.AddPlanModifiersToResourceSchema(fields, &s)
- if err != nil {
- resp.Diagnostics.AddError("error adding plan modifiers", err.Error())
- return
- }
- resp.Schema = s
-}
-
-func (r *databaseResource) IdentitySchema(
- _ context.Context,
- _ resource.IdentitySchemaRequest,
- resp *resource.IdentitySchemaResponse,
-) {
- resp.IdentitySchema = identityschema.Schema{
- Attributes: map[string]identityschema.Attribute{
- "project_id": identityschema.StringAttribute{
- RequiredForImport: true, // must be set during import by the practitioner
- },
- "region": identityschema.StringAttribute{
- RequiredForImport: true, // can be defaulted by the provider configuration
- },
- "instance_id": identityschema.StringAttribute{
- RequiredForImport: true, // can be defaulted by the provider configuration
- },
- "database_name": identityschema.StringAttribute{
- RequiredForImport: true, // can be defaulted by the provider configuration
- },
- },
- }
-}
-
-// Configure adds the provider configured client to the resource.
-func (r *databaseResource) Configure(
- ctx context.Context,
- req resource.ConfigureRequest,
- resp *resource.ConfigureResponse,
-) {
- var ok bool
- r.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics)
- if !ok {
- return
- }
-
- apiClientConfigOptions := []config.ConfigurationOption{
- config.WithCustomAuth(r.providerData.RoundTripper),
- utils.UserAgentConfigOption(r.providerData.Version),
- }
- if r.providerData.SQLServerFlexCustomEndpoint != "" {
- apiClientConfigOptions = append(
- apiClientConfigOptions,
- config.WithEndpoint(r.providerData.SQLServerFlexCustomEndpoint),
- )
- } else {
- apiClientConfigOptions = append(apiClientConfigOptions, config.WithRegion(r.providerData.GetRegion()))
- }
- apiClient, err := sqlserverflexbeta.NewAPIClient(apiClientConfigOptions...)
- if err != nil {
- resp.Diagnostics.AddError(
- "Error configuring API client",
- fmt.Sprintf(
- "Configuring client: %v. This is an error related to the provider configuration, not to the resource configuration",
- err,
- ),
- )
- return
- }
- r.client = apiClient
- tflog.Info(ctx, "sqlserverflexbeta.Database client configured")
-}
-
-func (r *databaseResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) {
- var data resourceModel
- createErr := "DB create error"
-
- // Read Terraform plan data into the model
- resp.Diagnostics.Append(req.Plan.Get(ctx, &data)...)
-
- if resp.Diagnostics.HasError() {
- return
- }
-
- ctx = core.InitProviderContext(ctx)
-
- projectId := data.ProjectId.ValueString()
- region := data.Region.ValueString()
- instanceId := data.InstanceId.ValueString()
- ctx = tflog.SetField(ctx, "project_id", projectId)
- ctx = tflog.SetField(ctx, "region", region)
- ctx = tflog.SetField(ctx, "instance_id", instanceId)
-
- databaseName := data.Name.ValueString()
- ctx = tflog.SetField(ctx, "database_name", databaseName)
-
- payLoad := sqlserverflexbeta.CreateDatabaseRequestPayload{}
- if !data.Collation.IsNull() && !data.Collation.IsUnknown() {
- payLoad.Collation = data.Collation.ValueStringPointer()
- }
-
- if !data.Compatibility.IsNull() && !data.Compatibility.IsUnknown() {
- payLoad.Compatibility = data.Compatibility.ValueInt64Pointer()
- }
-
- payLoad.Name = data.Name.ValueStringPointer()
- payLoad.Owner = data.Owner.ValueStringPointer()
-
- _, err := wait.WaitForUserWaitHandler(
- ctx,
- r.client,
- projectId,
- instanceId,
- region,
- data.Owner.ValueString(),
- ).
- SetSleepBeforeWait(10 * time.Second).
- WaitWithContext(ctx)
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- createErr,
- fmt.Sprintf("Calling API: %v", err),
- )
- return
- }
-
- createResp, err := r.client.CreateDatabaseRequest(ctx, projectId, region, instanceId).
- CreateDatabaseRequestPayload(payLoad).
- Execute()
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- createErr,
- fmt.Sprintf("Calling API: %v", err),
- )
- return
- }
-
- if createResp == nil || createResp.Id == nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "Error creating database",
- "API didn't return database Id. A database might have been created",
- )
- return
- }
-
- databaseId := *createResp.Id
-
- ctx = tflog.SetField(ctx, "database_id", databaseId)
-
- ctx = core.LogResponse(ctx)
-
- // Set data returned by API in identity
- identity := DatabaseResourceIdentityModel{
- ProjectID: types.StringValue(projectId),
- Region: types.StringValue(region),
- InstanceID: types.StringValue(instanceId),
- DatabaseName: types.StringValue(databaseName),
- }
- resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- waitResp, err := wait.CreateDatabaseWaitHandler(
- ctx,
- r.client,
- projectId,
- instanceId,
- region,
- databaseName,
- ).SetSleepBeforeWait(
- 30 * time.Second,
- ).SetTimeout(
- 15 * time.Minute,
- ).WaitWithContext(ctx)
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- createErr,
- fmt.Sprintf("Database creation waiting: %v", err),
- )
- return
- }
-
- if waitResp.Id == nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- createErr,
- "Database creation waiting: returned id is nil",
- )
- return
- }
-
- if *waitResp.Id != databaseId {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- createErr,
- "Database creation waiting: returned id is different",
- )
- return
- }
-
- if *waitResp.Owner != data.Owner.ValueString() {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- createErr,
- "Database creation waiting: returned owner is different",
- )
- return
- }
-
- if *waitResp.Name != data.Name.ValueString() {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- createErr,
- "Database creation waiting: returned name is different",
- )
- return
- }
-
- // Map response body to schema
- err = mapResourceFields(waitResp, &data, region)
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "Error creating database",
- fmt.Sprintf("Processing API payload: %v", err),
- )
- return
- }
-
- // Set state to fully populated data
- resp.Diagnostics.Append(resp.State.Set(ctx, data)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- // Save data into Terraform state
-
- tflog.Info(ctx, "sqlserverflexbeta.Database created")
-}
-
-func (r *databaseResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) {
- var model resourceModel
- diags := req.State.Get(ctx, &model)
- resp.Diagnostics.Append(diags...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- ctx = core.InitProviderContext(ctx)
-
- projectId := model.ProjectId.ValueString()
- region := model.Region.ValueString()
- instanceId := model.InstanceId.ValueString()
- databaseName := model.DatabaseName.ValueString()
-
- ctx = tflog.SetField(ctx, "project_id", projectId)
- ctx = tflog.SetField(ctx, "instance_id", instanceId)
- ctx = tflog.SetField(ctx, "region", region)
- ctx = tflog.SetField(ctx, "database_name", databaseName)
-
- databaseResp, err := r.client.GetDatabaseRequest(ctx, projectId, region, instanceId, databaseName).Execute()
- if err != nil {
- oapiErr, ok := err.(*oapierror.GenericOpenAPIError) //nolint:errorlint //complaining that error.As should be used to catch wrapped errors, but this error should not be wrapped
- if (ok && oapiErr.StatusCode == http.StatusNotFound) || errors.Is(err, errDatabaseNotFound) {
- resp.State.RemoveResource(ctx)
- return
- }
- core.LogAndAddError(ctx, &resp.Diagnostics, "Error reading database", fmt.Sprintf("Calling API: %v", err))
- return
- }
-
- ctx = core.LogResponse(ctx)
-
- // Map response body to schema
- err = mapResourceFields(databaseResp, &model, region)
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "Error reading database",
- fmt.Sprintf("Processing API payload: %v", err),
- )
- return
- }
-
- // Save identity into Terraform state
- identity := DatabaseResourceIdentityModel{
- ProjectID: types.StringValue(projectId),
- Region: types.StringValue(region),
- InstanceID: types.StringValue(instanceId),
- DatabaseName: types.StringValue(databaseName),
- }
- resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- // Set refreshed state
- resp.Diagnostics.Append(resp.State.Set(ctx, &model)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- tflog.Info(ctx, "sqlserverflexbeta.Database read")
-}
-
-func (r *databaseResource) Update(ctx context.Context, _ resource.UpdateRequest, resp *resource.UpdateResponse) {
- // TODO: Check update api endpoint - not available at the moment, so return an error for now
- core.LogAndAddError(ctx, &resp.Diagnostics, "Error updating database", "there is no way to update a database")
-}
-
-func (r *databaseResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) {
- // nolint:gocritic // function signature required by Terraform
- var model resourceModel
- diags := req.State.Get(ctx, &model)
- resp.Diagnostics.Append(diags...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- projectId := model.ProjectId.ValueString()
- region := model.Region.ValueString()
- instanceId := model.InstanceId.ValueString()
- databaseName := model.DatabaseName.ValueString()
-
- ctx = tflog.SetField(ctx, "project_id", projectId)
- ctx = tflog.SetField(ctx, "instance_id", instanceId)
- ctx = tflog.SetField(ctx, "region", region)
- ctx = tflog.SetField(ctx, "database_name", databaseName)
-
- // Delete existing record set
- err := r.client.DeleteDatabaseRequestExecute(ctx, projectId, region, instanceId, databaseName)
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "Error deleting database",
- fmt.Sprintf(
- "Calling API: %v\nname: %s, region: %s, instanceId: %s", err, databaseName, region, instanceId,
- ),
- )
- return
- }
-
- // TODO: wait handler??
-
- ctx = core.LogResponse(ctx)
- resp.State.RemoveResource(ctx)
-
- tflog.Info(ctx, "sqlserverflexbeta.Database deleted")
-}
-
-// ModifyPlan implements resource.ResourceWithModifyPlan.
-// Use the modifier to set the effective region in the current plan.
-func (r *databaseResource) ModifyPlan(
- ctx context.Context,
- req resource.ModifyPlanRequest,
- resp *resource.ModifyPlanResponse,
-) { // nolint:gocritic // function signature required by Terraform
- // skip initial empty configuration to avoid follow-up errors
- if req.Config.Raw.IsNull() {
- return
- }
-
- var configModel resourceModel
- resp.Diagnostics.Append(req.Config.Get(ctx, &configModel)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- var planModel resourceModel
- resp.Diagnostics.Append(req.Plan.Get(ctx, &planModel)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- utils.AdaptRegion(ctx, configModel.Region, &planModel.Region, r.providerData.GetRegion(), resp)
- if resp.Diagnostics.HasError() {
- return
- }
-
- var identityModel DatabaseResourceIdentityModel
- identityModel.ProjectID = planModel.ProjectId
- identityModel.Region = planModel.Region
-
- if !planModel.InstanceId.IsNull() && !planModel.InstanceId.IsUnknown() {
- identityModel.InstanceID = planModel.InstanceId
- }
-
- if !planModel.Name.IsNull() && !planModel.Name.IsUnknown() {
- identityModel.DatabaseName = planModel.Name
- }
-
- resp.Diagnostics.Append(resp.Identity.Set(ctx, identityModel)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- resp.Diagnostics.Append(resp.Plan.Set(ctx, planModel)...)
- if resp.Diagnostics.HasError() {
- return
- }
-}
-
-// ImportState imports a resource into the Terraform state on success.
-// The expected format of the resource import identifier is: project_id,zone_id,record_set_id
-func (r *databaseResource) ImportState(
- ctx context.Context,
- req resource.ImportStateRequest,
- resp *resource.ImportStateResponse,
-) {
- ctx = core.InitProviderContext(ctx)
-
- if req.ID != "" {
- idParts := strings.Split(req.ID, core.Separator)
-
- if len(idParts) != 4 || idParts[0] == "" || idParts[1] == "" || idParts[2] == "" || idParts[3] == "" {
- core.LogAndAddError(
- ctx, &resp.Diagnostics,
- "Error importing database",
- fmt.Sprintf(
- "Expected import identifier with format [project_id],[region],[instance_id],[database_name] Got: %q",
- req.ID,
- ),
- )
- return
- }
-
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), idParts[0])...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), idParts[1])...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), idParts[2])...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("database_name"), idParts[3])...)
-
- var identityData DatabaseResourceIdentityModel
- identityData.ProjectID = types.StringValue(idParts[0])
- identityData.Region = types.StringValue(idParts[1])
- identityData.InstanceID = types.StringValue(idParts[2])
- identityData.DatabaseName = types.StringValue(idParts[3])
-
- resp.Diagnostics.Append(resp.Identity.Set(ctx, &identityData)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- tflog.Info(ctx, "Sqlserverflexbeta database state imported")
- return
- }
-
- // If no ID is provided, attempt to read identity attributes from the import configuration
- var identityData DatabaseResourceIdentityModel
- resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- projectId := identityData.ProjectID.ValueString()
- region := identityData.Region.ValueString()
- instanceId := identityData.InstanceID.ValueString()
- databaseName := identityData.DatabaseName.ValueString()
-
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), projectId)...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), region)...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), instanceId)...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("database_name"), databaseName)...)
-
- tflog.Info(ctx, "Sqlserverflexbeta database state imported")
-}
-
-// extractIdentityData extracts essential identifiers from the resource model, falling back to the identity mode
diff --git a/stackit/internal/services/sqlserverflexbeta/flavor/datasource.go b/stackit/internal/services/sqlserverflexbeta/flavor/datasource.go
deleted file mode 100644
index 06e055f2..00000000
--- a/stackit/internal/services/sqlserverflexbeta/flavor/datasource.go
+++ /dev/null
@@ -1,355 +0,0 @@
-package sqlserverFlexBetaFlavor
-
-import (
- "context"
- "fmt"
-
- "github.com/hashicorp/terraform-plugin-framework/attr"
- "github.com/hashicorp/terraform-plugin-framework/datasource"
- "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/types"
- "github.com/hashicorp/terraform-plugin-framework/types/basetypes"
- "github.com/hashicorp/terraform-plugin-log/tflog"
- "github.com/stackitcloud/stackit-sdk-go/core/config"
-
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
-
- sqlserverflexbetaPkg "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta"
- sqlserverflexbetaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/flavor/datasources_gen"
-)
-
-// Ensure the implementation satisfies the expected interfaces.
-var (
- _ datasource.DataSource = &flavorDataSource{}
- _ datasource.DataSourceWithConfigure = &flavorDataSource{}
-)
-
-type FlavorModel struct {
- ProjectId types.String `tfsdk:"project_id"`
- Region types.String `tfsdk:"region"`
- StorageClass types.String `tfsdk:"storage_class"`
- Cpu types.Int64 `tfsdk:"cpu"`
- Description types.String `tfsdk:"description"`
- Id types.String `tfsdk:"id"`
- FlavorId types.String `tfsdk:"flavor_id"`
- MaxGb types.Int64 `tfsdk:"max_gb"`
- Memory types.Int64 `tfsdk:"ram"`
- MinGb types.Int64 `tfsdk:"min_gb"`
- NodeType types.String `tfsdk:"node_type"`
- StorageClasses types.List `tfsdk:"storage_classes"`
-}
-
-// NewFlavorDataSource is a helper function to simplify the provider implementation.
-func NewFlavorDataSource() datasource.DataSource {
- return &flavorDataSource{}
-}
-
-// flavorDataSource is the data source implementation.
-type flavorDataSource struct {
- client *sqlserverflexbetaPkg.APIClient
- providerData core.ProviderData
-}
-
-// Metadata returns the data source type name.
-func (r *flavorDataSource) Metadata(_ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
- resp.TypeName = req.ProviderTypeName + "_sqlserverflexbeta_flavor"
-}
-
-// Configure adds the provider configured client to the data source.
-func (r *flavorDataSource) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) {
- var ok bool
- r.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics)
- if !ok {
- return
- }
-
- apiClientConfigOptions := []config.ConfigurationOption{
- config.WithCustomAuth(r.providerData.RoundTripper),
- utils.UserAgentConfigOption(r.providerData.Version),
- }
- if r.providerData.SQLServerFlexCustomEndpoint != "" {
- apiClientConfigOptions = append(
- apiClientConfigOptions,
- config.WithEndpoint(r.providerData.SQLServerFlexCustomEndpoint),
- )
- } else {
- apiClientConfigOptions = append(
- apiClientConfigOptions,
- config.WithRegion(r.providerData.GetRegion()),
- )
- }
- apiClient, err := sqlserverflexbetaPkg.NewAPIClient(apiClientConfigOptions...)
- if err != nil {
- resp.Diagnostics.AddError(
- "Error configuring API client",
- fmt.Sprintf(
- "Configuring client: %v. This is an error related to the provider configuration, not to the resource configuration",
- err,
- ),
- )
- return
- }
- r.client = apiClient
- tflog.Info(ctx, "SQL Server Flex instance client configured")
-}
-
-func (r *flavorDataSource) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) {
- resp.Schema = schema.Schema{
- Attributes: map[string]schema.Attribute{
- "project_id": schema.StringAttribute{
- Required: true,
- Description: "The project ID of the flavor.",
- MarkdownDescription: "The project ID of the flavor.",
- },
- "region": schema.StringAttribute{
- Required: true,
- Description: "The region of the flavor.",
- MarkdownDescription: "The region of the flavor.",
- },
- "cpu": schema.Int64Attribute{
- Required: true,
- Description: "The cpu count of the instance.",
- MarkdownDescription: "The cpu count of the instance.",
- },
- "ram": schema.Int64Attribute{
- Required: true,
- Description: "The memory of the instance in Gibibyte.",
- MarkdownDescription: "The memory of the instance in Gibibyte.",
- },
- "storage_class": schema.StringAttribute{
- Required: true,
- Description: "The memory of the instance in Gibibyte.",
- MarkdownDescription: "The memory of the instance in Gibibyte.",
- },
- "node_type": schema.StringAttribute{
- Required: true,
- Description: "defines the nodeType it can be either single or HA",
- MarkdownDescription: "defines the nodeType it can be either single or HA",
- },
- "flavor_id": schema.StringAttribute{
- Computed: true,
- Description: "The id of the instance flavor.",
- MarkdownDescription: "The id of the instance flavor.",
- },
- "description": schema.StringAttribute{
- Computed: true,
- Description: "The flavor description.",
- MarkdownDescription: "The flavor description.",
- },
- "id": schema.StringAttribute{
- Computed: true,
- Description: "The id of the instance flavor.",
- MarkdownDescription: "The id of the instance flavor.",
- },
- "max_gb": schema.Int64Attribute{
- Computed: true,
- Description: "maximum storage which can be ordered for the flavor in Gigabyte.",
- MarkdownDescription: "maximum storage which can be ordered for the flavor in Gigabyte.",
- },
- "min_gb": schema.Int64Attribute{
- Computed: true,
- Description: "minimum storage which is required to order in Gigabyte.",
- MarkdownDescription: "minimum storage which is required to order in Gigabyte.",
- },
- "storage_classes": schema.ListNestedAttribute{
- NestedObject: schema.NestedAttributeObject{
- Attributes: map[string]schema.Attribute{
- "class": schema.StringAttribute{
- Computed: true,
- },
- "max_io_per_sec": schema.Int64Attribute{
- Computed: true,
- },
- "max_through_in_mb": schema.Int64Attribute{
- Computed: true,
- },
- },
- CustomType: sqlserverflexbetaGen.StorageClassesType{
- ObjectType: types.ObjectType{
- AttrTypes: sqlserverflexbetaGen.StorageClassesValue{}.AttributeTypes(ctx),
- },
- },
- },
- Computed: true,
- Description: "maximum storage which can be ordered for the flavor in Gigabyte.",
- MarkdownDescription: "maximum storage which can be ordered for the flavor in Gigabyte.",
- },
- },
- //Attributes: map[string]schema.Attribute{
- // "project_id": schema.StringAttribute{
- // Required: true,
- // Description: "The cpu count of the instance.",
- // MarkdownDescription: "The cpu count of the instance.",
- // },
- // "region": schema.StringAttribute{
- // Required: true,
- // Description: "The flavor description.",
- // MarkdownDescription: "The flavor description.",
- // },
- // "cpu": schema.Int64Attribute{
- // Required: true,
- // Description: "The cpu count of the instance.",
- // MarkdownDescription: "The cpu count of the instance.",
- // },
- // "ram": schema.Int64Attribute{
- // Required: true,
- // Description: "The memory of the instance in Gibibyte.",
- // MarkdownDescription: "The memory of the instance in Gibibyte.",
- // },
- // "storage_class": schema.StringAttribute{
- // Required: true,
- // Description: "The memory of the instance in Gibibyte.",
- // MarkdownDescription: "The memory of the instance in Gibibyte.",
- // },
- // "description": schema.StringAttribute{
- // Computed: true,
- // Description: "The flavor description.",
- // MarkdownDescription: "The flavor description.",
- // },
- // "id": schema.StringAttribute{
- // Computed: true,
- // Description: "The terraform id of the instance flavor.",
- // MarkdownDescription: "The terraform id of the instance flavor.",
- // },
- // "flavor_id": schema.StringAttribute{
- // Computed: true,
- // Description: "The flavor id of the instance flavor.",
- // MarkdownDescription: "The flavor id of the instance flavor.",
- // },
- // "max_gb": schema.Int64Attribute{
- // Computed: true,
- // Description: "maximum storage which can be ordered for the flavor in Gigabyte.",
- // MarkdownDescription: "maximum storage which can be ordered for the flavor in Gigabyte.",
- // },
- // "min_gb": schema.Int64Attribute{
- // Computed: true,
- // Description: "minimum storage which is required to order in Gigabyte.",
- // MarkdownDescription: "minimum storage which is required to order in Gigabyte.",
- // },
- // "node_type": schema.StringAttribute{
- // Required: true,
- // Description: "defines the nodeType it can be either single or replica",
- // MarkdownDescription: "defines the nodeType it can be either single or replica",
- // },
- // "storage_classes": schema.ListNestedAttribute{
- // Computed: true,
- // NestedObject: schema.NestedAttributeObject{
- // Attributes: map[string]schema.Attribute{
- // "class": schema.StringAttribute{
- // Computed: true,
- // },
- // "max_io_per_sec": schema.Int64Attribute{
- // Computed: true,
- // },
- // "max_through_in_mb": schema.Int64Attribute{
- // Computed: true,
- // },
- // },
- // CustomType: sqlserverflexalphaGen.StorageClassesType{
- // ObjectType: types.ObjectType{
- // AttrTypes: sqlserverflexalphaGen.StorageClassesValue{}.AttributeTypes(ctx),
- // },
- // },
- // },
- // },
- // },
- }
-}
-
-func (r *flavorDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
- var model FlavorModel
- diags := req.Config.Get(ctx, &model)
- resp.Diagnostics.Append(diags...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- ctx = core.InitProviderContext(ctx)
-
- projectId := model.ProjectId.ValueString()
- region := r.providerData.GetRegionWithOverride(model.Region)
- ctx = tflog.SetField(ctx, "project_id", projectId)
- ctx = tflog.SetField(ctx, "region", region)
-
- flavors, err := getAllFlavors(ctx, r.client, projectId, region)
- if err != nil {
- core.LogAndAddError(ctx, &resp.Diagnostics, "Error reading flavors", fmt.Sprintf("getAllFlavors: %v", err))
- return
- }
-
- var foundFlavors []sqlserverflexbetaPkg.ListFlavors
- for _, flavor := range flavors {
- if model.Cpu.ValueInt64() != *flavor.Cpu {
- continue
- }
- if model.Memory.ValueInt64() != *flavor.Memory {
- continue
- }
- if model.NodeType.ValueString() != *flavor.NodeType {
- continue
- }
- for _, sc := range *flavor.StorageClasses {
- if model.StorageClass.ValueString() != *sc.Class {
- continue
- }
- foundFlavors = append(foundFlavors, flavor)
- }
- }
- if len(foundFlavors) == 0 {
- resp.Diagnostics.AddError("get flavor", "could not find requested flavor")
- return
- }
- if len(foundFlavors) > 1 {
- resp.Diagnostics.AddError("get flavor", "found too many matching flavors")
- return
- }
-
- f := foundFlavors[0]
- model.Description = types.StringValue(*f.Description)
- model.Id = utils.BuildInternalTerraformId(model.ProjectId.ValueString(), region, *f.Id)
- model.FlavorId = types.StringValue(*f.Id)
- model.MaxGb = types.Int64Value(*f.MaxGB)
- model.MinGb = types.Int64Value(*f.MinGB)
-
- if f.StorageClasses == nil {
- model.StorageClasses = types.ListNull(sqlserverflexbetaGen.StorageClassesType{
- ObjectType: basetypes.ObjectType{
- AttrTypes: sqlserverflexbetaGen.StorageClassesValue{}.AttributeTypes(ctx),
- },
- })
- } else {
- var scList []attr.Value
- for _, sc := range *f.StorageClasses {
- scList = append(
- scList,
- sqlserverflexbetaGen.NewStorageClassesValueMust(
- sqlserverflexbetaGen.StorageClassesValue{}.AttributeTypes(ctx),
- map[string]attr.Value{
- "class": types.StringValue(*sc.Class),
- "max_io_per_sec": types.Int64Value(*sc.MaxIoPerSec),
- "max_through_in_mb": types.Int64Value(*sc.MaxThroughInMb),
- },
- ),
- )
- }
- storageClassesList := types.ListValueMust(
- sqlserverflexbetaGen.StorageClassesType{
- ObjectType: basetypes.ObjectType{
- AttrTypes: sqlserverflexbetaGen.StorageClassesValue{}.AttributeTypes(ctx),
- },
- },
- scList,
- )
- model.StorageClasses = storageClassesList
- }
-
- // Set refreshed state
- diags = resp.State.Set(ctx, model)
- resp.Diagnostics.Append(diags...)
- if resp.Diagnostics.HasError() {
- return
- }
- tflog.Info(ctx, "SQL Server Flex flavors read")
-}
diff --git a/stackit/internal/services/sqlserverflexbeta/flavor/datasources_gen/flavor_data_source_gen.go b/stackit/internal/services/sqlserverflexbeta/flavor/datasources_gen/flavor_data_source_gen.go
deleted file mode 100644
index a766197e..00000000
--- a/stackit/internal/services/sqlserverflexbeta/flavor/datasources_gen/flavor_data_source_gen.go
+++ /dev/null
@@ -1,1909 +0,0 @@
-// Code generated by terraform-plugin-framework-generator DO NOT EDIT.
-
-package sqlserverflexbeta
-
-import (
- "context"
- "fmt"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
- "github.com/hashicorp/terraform-plugin-framework/attr"
- "github.com/hashicorp/terraform-plugin-framework/diag"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
- "github.com/hashicorp/terraform-plugin-framework/types"
- "github.com/hashicorp/terraform-plugin-framework/types/basetypes"
- "github.com/hashicorp/terraform-plugin-go/tftypes"
- "strings"
-
- "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
-)
-
-func FlavorDataSourceSchema(ctx context.Context) schema.Schema {
- return schema.Schema{
- Attributes: map[string]schema.Attribute{
- "flavors": schema.ListNestedAttribute{
- NestedObject: schema.NestedAttributeObject{
- Attributes: map[string]schema.Attribute{
- "cpu": schema.Int64Attribute{
- Computed: true,
- Description: "The cpu count of the instance.",
- MarkdownDescription: "The cpu count of the instance.",
- },
- "description": schema.StringAttribute{
- Computed: true,
- Description: "The flavor description.",
- MarkdownDescription: "The flavor description.",
- },
- "id": schema.StringAttribute{
- Computed: true,
- Description: "The id of the instance flavor.",
- MarkdownDescription: "The id of the instance flavor.",
- },
- "max_gb": schema.Int64Attribute{
- Computed: true,
- Description: "maximum storage which can be ordered for the flavor in Gigabyte.",
- MarkdownDescription: "maximum storage which can be ordered for the flavor in Gigabyte.",
- },
- "memory": schema.Int64Attribute{
- Computed: true,
- Description: "The memory of the instance in Gibibyte.",
- MarkdownDescription: "The memory of the instance in Gibibyte.",
- },
- "min_gb": schema.Int64Attribute{
- Computed: true,
- Description: "minimum storage which is required to order in Gigabyte.",
- MarkdownDescription: "minimum storage which is required to order in Gigabyte.",
- },
- "node_type": schema.StringAttribute{
- Computed: true,
- Description: "defines the nodeType it can be either single or HA",
- MarkdownDescription: "defines the nodeType it can be either single or HA",
- },
- "storage_classes": schema.ListNestedAttribute{
- NestedObject: schema.NestedAttributeObject{
- Attributes: map[string]schema.Attribute{
- "class": schema.StringAttribute{
- Computed: true,
- },
- "max_io_per_sec": schema.Int64Attribute{
- Computed: true,
- },
- "max_through_in_mb": schema.Int64Attribute{
- Computed: true,
- },
- },
- CustomType: StorageClassesType{
- ObjectType: types.ObjectType{
- AttrTypes: StorageClassesValue{}.AttributeTypes(ctx),
- },
- },
- },
- Computed: true,
- Description: "maximum storage which can be ordered for the flavor in Gigabyte.",
- MarkdownDescription: "maximum storage which can be ordered for the flavor in Gigabyte.",
- },
- },
- CustomType: FlavorsType{
- ObjectType: types.ObjectType{
- AttrTypes: FlavorsValue{}.AttributeTypes(ctx),
- },
- },
- },
- Computed: true,
- Description: "List of flavors available for the project.",
- MarkdownDescription: "List of flavors available for the project.",
- },
- "page": schema.Int64Attribute{
- Optional: true,
- Computed: true,
- Description: "Number of the page of items list to be returned.",
- MarkdownDescription: "Number of the page of items list to be returned.",
- },
- "pagination": schema.SingleNestedAttribute{
- Attributes: map[string]schema.Attribute{
- "page": schema.Int64Attribute{
- Computed: true,
- },
- "size": schema.Int64Attribute{
- Computed: true,
- },
- "sort": schema.StringAttribute{
- Computed: true,
- },
- "total_pages": schema.Int64Attribute{
- Computed: true,
- },
- "total_rows": schema.Int64Attribute{
- Computed: true,
- },
- },
- CustomType: PaginationType{
- ObjectType: types.ObjectType{
- AttrTypes: PaginationValue{}.AttributeTypes(ctx),
- },
- },
- Computed: true,
- },
- "project_id": schema.StringAttribute{
- Required: true,
- Description: "The STACKIT project ID.",
- MarkdownDescription: "The STACKIT project ID.",
- },
- "region": schema.StringAttribute{
- Required: true,
- Description: "The region which should be addressed",
- MarkdownDescription: "The region which should be addressed",
- Validators: []validator.String{
- stringvalidator.OneOf(
- "eu01",
- ),
- },
- },
- "size": schema.Int64Attribute{
- Optional: true,
- Computed: true,
- Description: "Number of items to be returned on each page.",
- MarkdownDescription: "Number of items to be returned on each page.",
- },
- "sort": schema.StringAttribute{
- Optional: true,
- Computed: true,
- Description: "Sorting of the flavors to be returned on each page.",
- MarkdownDescription: "Sorting of the flavors to be returned on each page.",
- Validators: []validator.String{
- stringvalidator.OneOf(
- "index.desc",
- "index.asc",
- "cpu.desc",
- "cpu.asc",
- "flavor_description.asc",
- "flavor_description.desc",
- "id.desc",
- "id.asc",
- "size_max.desc",
- "size_max.asc",
- "ram.desc",
- "ram.asc",
- "size_min.desc",
- "size_min.asc",
- "storage_class.asc",
- "storage_class.desc",
- "node_type.asc",
- "node_type.desc",
- ),
- },
- },
- },
- }
-}
-
-type FlavorModel struct {
- Flavors types.List `tfsdk:"flavors"`
- Page types.Int64 `tfsdk:"page"`
- Pagination PaginationValue `tfsdk:"pagination"`
- ProjectId types.String `tfsdk:"project_id"`
- Region types.String `tfsdk:"region"`
- Size types.Int64 `tfsdk:"size"`
- Sort types.String `tfsdk:"sort"`
-}
-
-var _ basetypes.ObjectTypable = FlavorsType{}
-
-type FlavorsType struct {
- basetypes.ObjectType
-}
-
-func (t FlavorsType) Equal(o attr.Type) bool {
- other, ok := o.(FlavorsType)
-
- if !ok {
- return false
- }
-
- return t.ObjectType.Equal(other.ObjectType)
-}
-
-func (t FlavorsType) String() string {
- return "FlavorsType"
-}
-
-func (t FlavorsType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- attributes := in.Attributes()
-
- cpuAttribute, ok := attributes["cpu"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `cpu is missing from object`)
-
- return nil, diags
- }
-
- cpuVal, ok := cpuAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`cpu expected to be basetypes.Int64Value, was: %T`, cpuAttribute))
- }
-
- descriptionAttribute, ok := attributes["description"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `description is missing from object`)
-
- return nil, diags
- }
-
- descriptionVal, ok := descriptionAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`description expected to be basetypes.StringValue, was: %T`, descriptionAttribute))
- }
-
- idAttribute, ok := attributes["id"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `id is missing from object`)
-
- return nil, diags
- }
-
- idVal, ok := idAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`id expected to be basetypes.StringValue, was: %T`, idAttribute))
- }
-
- maxGbAttribute, ok := attributes["max_gb"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `max_gb is missing from object`)
-
- return nil, diags
- }
-
- maxGbVal, ok := maxGbAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`max_gb expected to be basetypes.Int64Value, was: %T`, maxGbAttribute))
- }
-
- memoryAttribute, ok := attributes["memory"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `memory is missing from object`)
-
- return nil, diags
- }
-
- memoryVal, ok := memoryAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`memory expected to be basetypes.Int64Value, was: %T`, memoryAttribute))
- }
-
- minGbAttribute, ok := attributes["min_gb"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `min_gb is missing from object`)
-
- return nil, diags
- }
-
- minGbVal, ok := minGbAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`min_gb expected to be basetypes.Int64Value, was: %T`, minGbAttribute))
- }
-
- nodeTypeAttribute, ok := attributes["node_type"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `node_type is missing from object`)
-
- return nil, diags
- }
-
- nodeTypeVal, ok := nodeTypeAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`node_type expected to be basetypes.StringValue, was: %T`, nodeTypeAttribute))
- }
-
- storageClassesAttribute, ok := attributes["storage_classes"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `storage_classes is missing from object`)
-
- return nil, diags
- }
-
- storageClassesVal, ok := storageClassesAttribute.(basetypes.ListValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`storage_classes expected to be basetypes.ListValue, was: %T`, storageClassesAttribute))
- }
-
- if diags.HasError() {
- return nil, diags
- }
-
- return FlavorsValue{
- Cpu: cpuVal,
- Description: descriptionVal,
- Id: idVal,
- MaxGb: maxGbVal,
- Memory: memoryVal,
- MinGb: minGbVal,
- NodeType: nodeTypeVal,
- StorageClasses: storageClassesVal,
- state: attr.ValueStateKnown,
- }, diags
-}
-
-func NewFlavorsValueNull() FlavorsValue {
- return FlavorsValue{
- state: attr.ValueStateNull,
- }
-}
-
-func NewFlavorsValueUnknown() FlavorsValue {
- return FlavorsValue{
- state: attr.ValueStateUnknown,
- }
-}
-
-func NewFlavorsValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (FlavorsValue, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521
- ctx := context.Background()
-
- for name, attributeType := range attributeTypes {
- attribute, ok := attributes[name]
-
- if !ok {
- diags.AddError(
- "Missing FlavorsValue Attribute Value",
- "While creating a FlavorsValue value, a missing attribute value was detected. "+
- "A FlavorsValue must contain values for all attributes, even if null or unknown. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("FlavorsValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()),
- )
-
- continue
- }
-
- if !attributeType.Equal(attribute.Type(ctx)) {
- diags.AddError(
- "Invalid FlavorsValue Attribute Type",
- "While creating a FlavorsValue value, an invalid attribute value was detected. "+
- "A FlavorsValue must use a matching attribute type for the value. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("FlavorsValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+
- fmt.Sprintf("FlavorsValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)),
- )
- }
- }
-
- for name := range attributes {
- _, ok := attributeTypes[name]
-
- if !ok {
- diags.AddError(
- "Extra FlavorsValue Attribute Value",
- "While creating a FlavorsValue value, an extra attribute value was detected. "+
- "A FlavorsValue must not contain values beyond the expected attribute types. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("Extra FlavorsValue Attribute Name: %s", name),
- )
- }
- }
-
- if diags.HasError() {
- return NewFlavorsValueUnknown(), diags
- }
-
- cpuAttribute, ok := attributes["cpu"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `cpu is missing from object`)
-
- return NewFlavorsValueUnknown(), diags
- }
-
- cpuVal, ok := cpuAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`cpu expected to be basetypes.Int64Value, was: %T`, cpuAttribute))
- }
-
- descriptionAttribute, ok := attributes["description"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `description is missing from object`)
-
- return NewFlavorsValueUnknown(), diags
- }
-
- descriptionVal, ok := descriptionAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`description expected to be basetypes.StringValue, was: %T`, descriptionAttribute))
- }
-
- idAttribute, ok := attributes["id"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `id is missing from object`)
-
- return NewFlavorsValueUnknown(), diags
- }
-
- idVal, ok := idAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`id expected to be basetypes.StringValue, was: %T`, idAttribute))
- }
-
- maxGbAttribute, ok := attributes["max_gb"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `max_gb is missing from object`)
-
- return NewFlavorsValueUnknown(), diags
- }
-
- maxGbVal, ok := maxGbAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`max_gb expected to be basetypes.Int64Value, was: %T`, maxGbAttribute))
- }
-
- memoryAttribute, ok := attributes["memory"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `memory is missing from object`)
-
- return NewFlavorsValueUnknown(), diags
- }
-
- memoryVal, ok := memoryAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`memory expected to be basetypes.Int64Value, was: %T`, memoryAttribute))
- }
-
- minGbAttribute, ok := attributes["min_gb"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `min_gb is missing from object`)
-
- return NewFlavorsValueUnknown(), diags
- }
-
- minGbVal, ok := minGbAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`min_gb expected to be basetypes.Int64Value, was: %T`, minGbAttribute))
- }
-
- nodeTypeAttribute, ok := attributes["node_type"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `node_type is missing from object`)
-
- return NewFlavorsValueUnknown(), diags
- }
-
- nodeTypeVal, ok := nodeTypeAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`node_type expected to be basetypes.StringValue, was: %T`, nodeTypeAttribute))
- }
-
- storageClassesAttribute, ok := attributes["storage_classes"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `storage_classes is missing from object`)
-
- return NewFlavorsValueUnknown(), diags
- }
-
- storageClassesVal, ok := storageClassesAttribute.(basetypes.ListValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`storage_classes expected to be basetypes.ListValue, was: %T`, storageClassesAttribute))
- }
-
- if diags.HasError() {
- return NewFlavorsValueUnknown(), diags
- }
-
- return FlavorsValue{
- Cpu: cpuVal,
- Description: descriptionVal,
- Id: idVal,
- MaxGb: maxGbVal,
- Memory: memoryVal,
- MinGb: minGbVal,
- NodeType: nodeTypeVal,
- StorageClasses: storageClassesVal,
- state: attr.ValueStateKnown,
- }, diags
-}
-
-func NewFlavorsValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) FlavorsValue {
- object, diags := NewFlavorsValue(attributeTypes, attributes)
-
- if diags.HasError() {
- // This could potentially be added to the diag package.
- diagsStrings := make([]string, 0, len(diags))
-
- for _, diagnostic := range diags {
- diagsStrings = append(diagsStrings, fmt.Sprintf(
- "%s | %s | %s",
- diagnostic.Severity(),
- diagnostic.Summary(),
- diagnostic.Detail()))
- }
-
- panic("NewFlavorsValueMust received error(s): " + strings.Join(diagsStrings, "\n"))
- }
-
- return object
-}
-
-func (t FlavorsType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) {
- if in.Type() == nil {
- return NewFlavorsValueNull(), nil
- }
-
- if !in.Type().Equal(t.TerraformType(ctx)) {
- return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type())
- }
-
- if !in.IsKnown() {
- return NewFlavorsValueUnknown(), nil
- }
-
- if in.IsNull() {
- return NewFlavorsValueNull(), nil
- }
-
- attributes := map[string]attr.Value{}
-
- val := map[string]tftypes.Value{}
-
- err := in.As(&val)
-
- if err != nil {
- return nil, err
- }
-
- for k, v := range val {
- a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v)
-
- if err != nil {
- return nil, err
- }
-
- attributes[k] = a
- }
-
- return NewFlavorsValueMust(FlavorsValue{}.AttributeTypes(ctx), attributes), nil
-}
-
-func (t FlavorsType) ValueType(ctx context.Context) attr.Value {
- return FlavorsValue{}
-}
-
-var _ basetypes.ObjectValuable = FlavorsValue{}
-
-type FlavorsValue struct {
- Cpu basetypes.Int64Value `tfsdk:"cpu"`
- Description basetypes.StringValue `tfsdk:"description"`
- Id basetypes.StringValue `tfsdk:"id"`
- MaxGb basetypes.Int64Value `tfsdk:"max_gb"`
- Memory basetypes.Int64Value `tfsdk:"memory"`
- MinGb basetypes.Int64Value `tfsdk:"min_gb"`
- NodeType basetypes.StringValue `tfsdk:"node_type"`
- StorageClasses basetypes.ListValue `tfsdk:"storage_classes"`
- state attr.ValueState
-}
-
-func (v FlavorsValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
- attrTypes := make(map[string]tftypes.Type, 8)
-
- var val tftypes.Value
- var err error
-
- attrTypes["cpu"] = basetypes.Int64Type{}.TerraformType(ctx)
- attrTypes["description"] = basetypes.StringType{}.TerraformType(ctx)
- attrTypes["id"] = basetypes.StringType{}.TerraformType(ctx)
- attrTypes["max_gb"] = basetypes.Int64Type{}.TerraformType(ctx)
- attrTypes["memory"] = basetypes.Int64Type{}.TerraformType(ctx)
- attrTypes["min_gb"] = basetypes.Int64Type{}.TerraformType(ctx)
- attrTypes["node_type"] = basetypes.StringType{}.TerraformType(ctx)
- attrTypes["storage_classes"] = basetypes.ListType{
- ElemType: StorageClassesValue{}.Type(ctx),
- }.TerraformType(ctx)
-
- objectType := tftypes.Object{AttributeTypes: attrTypes}
-
- switch v.state {
- case attr.ValueStateKnown:
- vals := make(map[string]tftypes.Value, 8)
-
- val, err = v.Cpu.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["cpu"] = val
-
- val, err = v.Description.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["description"] = val
-
- val, err = v.Id.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["id"] = val
-
- val, err = v.MaxGb.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["max_gb"] = val
-
- val, err = v.Memory.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["memory"] = val
-
- val, err = v.MinGb.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["min_gb"] = val
-
- val, err = v.NodeType.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["node_type"] = val
-
- val, err = v.StorageClasses.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["storage_classes"] = val
-
- if err := tftypes.ValidateValue(objectType, vals); err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- return tftypes.NewValue(objectType, vals), nil
- case attr.ValueStateNull:
- return tftypes.NewValue(objectType, nil), nil
- case attr.ValueStateUnknown:
- return tftypes.NewValue(objectType, tftypes.UnknownValue), nil
- default:
- panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state))
- }
-}
-
-func (v FlavorsValue) IsNull() bool {
- return v.state == attr.ValueStateNull
-}
-
-func (v FlavorsValue) IsUnknown() bool {
- return v.state == attr.ValueStateUnknown
-}
-
-func (v FlavorsValue) String() string {
- return "FlavorsValue"
-}
-
-func (v FlavorsValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- storageClasses := types.ListValueMust(
- StorageClassesType{
- basetypes.ObjectType{
- AttrTypes: StorageClassesValue{}.AttributeTypes(ctx),
- },
- },
- v.StorageClasses.Elements(),
- )
-
- if v.StorageClasses.IsNull() {
- storageClasses = types.ListNull(
- StorageClassesType{
- basetypes.ObjectType{
- AttrTypes: StorageClassesValue{}.AttributeTypes(ctx),
- },
- },
- )
- }
-
- if v.StorageClasses.IsUnknown() {
- storageClasses = types.ListUnknown(
- StorageClassesType{
- basetypes.ObjectType{
- AttrTypes: StorageClassesValue{}.AttributeTypes(ctx),
- },
- },
- )
- }
-
- attributeTypes := map[string]attr.Type{
- "cpu": basetypes.Int64Type{},
- "description": basetypes.StringType{},
- "id": basetypes.StringType{},
- "max_gb": basetypes.Int64Type{},
- "memory": basetypes.Int64Type{},
- "min_gb": basetypes.Int64Type{},
- "node_type": basetypes.StringType{},
- "storage_classes": basetypes.ListType{
- ElemType: StorageClassesValue{}.Type(ctx),
- },
- }
-
- if v.IsNull() {
- return types.ObjectNull(attributeTypes), diags
- }
-
- if v.IsUnknown() {
- return types.ObjectUnknown(attributeTypes), diags
- }
-
- objVal, diags := types.ObjectValue(
- attributeTypes,
- map[string]attr.Value{
- "cpu": v.Cpu,
- "description": v.Description,
- "id": v.Id,
- "max_gb": v.MaxGb,
- "memory": v.Memory,
- "min_gb": v.MinGb,
- "node_type": v.NodeType,
- "storage_classes": storageClasses,
- })
-
- return objVal, diags
-}
-
-func (v FlavorsValue) Equal(o attr.Value) bool {
- other, ok := o.(FlavorsValue)
-
- if !ok {
- return false
- }
-
- if v.state != other.state {
- return false
- }
-
- if v.state != attr.ValueStateKnown {
- return true
- }
-
- if !v.Cpu.Equal(other.Cpu) {
- return false
- }
-
- if !v.Description.Equal(other.Description) {
- return false
- }
-
- if !v.Id.Equal(other.Id) {
- return false
- }
-
- if !v.MaxGb.Equal(other.MaxGb) {
- return false
- }
-
- if !v.Memory.Equal(other.Memory) {
- return false
- }
-
- if !v.MinGb.Equal(other.MinGb) {
- return false
- }
-
- if !v.NodeType.Equal(other.NodeType) {
- return false
- }
-
- if !v.StorageClasses.Equal(other.StorageClasses) {
- return false
- }
-
- return true
-}
-
-func (v FlavorsValue) Type(ctx context.Context) attr.Type {
- return FlavorsType{
- basetypes.ObjectType{
- AttrTypes: v.AttributeTypes(ctx),
- },
- }
-}
-
-func (v FlavorsValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
- return map[string]attr.Type{
- "cpu": basetypes.Int64Type{},
- "description": basetypes.StringType{},
- "id": basetypes.StringType{},
- "max_gb": basetypes.Int64Type{},
- "memory": basetypes.Int64Type{},
- "min_gb": basetypes.Int64Type{},
- "node_type": basetypes.StringType{},
- "storage_classes": basetypes.ListType{
- ElemType: StorageClassesValue{}.Type(ctx),
- },
- }
-}
-
-var _ basetypes.ObjectTypable = StorageClassesType{}
-
-type StorageClassesType struct {
- basetypes.ObjectType
-}
-
-func (t StorageClassesType) Equal(o attr.Type) bool {
- other, ok := o.(StorageClassesType)
-
- if !ok {
- return false
- }
-
- return t.ObjectType.Equal(other.ObjectType)
-}
-
-func (t StorageClassesType) String() string {
- return "StorageClassesType"
-}
-
-func (t StorageClassesType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- attributes := in.Attributes()
-
- classAttribute, ok := attributes["class"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `class is missing from object`)
-
- return nil, diags
- }
-
- classVal, ok := classAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`class expected to be basetypes.StringValue, was: %T`, classAttribute))
- }
-
- maxIoPerSecAttribute, ok := attributes["max_io_per_sec"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `max_io_per_sec is missing from object`)
-
- return nil, diags
- }
-
- maxIoPerSecVal, ok := maxIoPerSecAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`max_io_per_sec expected to be basetypes.Int64Value, was: %T`, maxIoPerSecAttribute))
- }
-
- maxThroughInMbAttribute, ok := attributes["max_through_in_mb"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `max_through_in_mb is missing from object`)
-
- return nil, diags
- }
-
- maxThroughInMbVal, ok := maxThroughInMbAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`max_through_in_mb expected to be basetypes.Int64Value, was: %T`, maxThroughInMbAttribute))
- }
-
- if diags.HasError() {
- return nil, diags
- }
-
- return StorageClassesValue{
- Class: classVal,
- MaxIoPerSec: maxIoPerSecVal,
- MaxThroughInMb: maxThroughInMbVal,
- state: attr.ValueStateKnown,
- }, diags
-}
-
-func NewStorageClassesValueNull() StorageClassesValue {
- return StorageClassesValue{
- state: attr.ValueStateNull,
- }
-}
-
-func NewStorageClassesValueUnknown() StorageClassesValue {
- return StorageClassesValue{
- state: attr.ValueStateUnknown,
- }
-}
-
-func NewStorageClassesValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (StorageClassesValue, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521
- ctx := context.Background()
-
- for name, attributeType := range attributeTypes {
- attribute, ok := attributes[name]
-
- if !ok {
- diags.AddError(
- "Missing StorageClassesValue Attribute Value",
- "While creating a StorageClassesValue value, a missing attribute value was detected. "+
- "A StorageClassesValue must contain values for all attributes, even if null or unknown. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("StorageClassesValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()),
- )
-
- continue
- }
-
- if !attributeType.Equal(attribute.Type(ctx)) {
- diags.AddError(
- "Invalid StorageClassesValue Attribute Type",
- "While creating a StorageClassesValue value, an invalid attribute value was detected. "+
- "A StorageClassesValue must use a matching attribute type for the value. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("StorageClassesValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+
- fmt.Sprintf("StorageClassesValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)),
- )
- }
- }
-
- for name := range attributes {
- _, ok := attributeTypes[name]
-
- if !ok {
- diags.AddError(
- "Extra StorageClassesValue Attribute Value",
- "While creating a StorageClassesValue value, an extra attribute value was detected. "+
- "A StorageClassesValue must not contain values beyond the expected attribute types. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("Extra StorageClassesValue Attribute Name: %s", name),
- )
- }
- }
-
- if diags.HasError() {
- return NewStorageClassesValueUnknown(), diags
- }
-
- classAttribute, ok := attributes["class"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `class is missing from object`)
-
- return NewStorageClassesValueUnknown(), diags
- }
-
- classVal, ok := classAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`class expected to be basetypes.StringValue, was: %T`, classAttribute))
- }
-
- maxIoPerSecAttribute, ok := attributes["max_io_per_sec"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `max_io_per_sec is missing from object`)
-
- return NewStorageClassesValueUnknown(), diags
- }
-
- maxIoPerSecVal, ok := maxIoPerSecAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`max_io_per_sec expected to be basetypes.Int64Value, was: %T`, maxIoPerSecAttribute))
- }
-
- maxThroughInMbAttribute, ok := attributes["max_through_in_mb"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `max_through_in_mb is missing from object`)
-
- return NewStorageClassesValueUnknown(), diags
- }
-
- maxThroughInMbVal, ok := maxThroughInMbAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`max_through_in_mb expected to be basetypes.Int64Value, was: %T`, maxThroughInMbAttribute))
- }
-
- if diags.HasError() {
- return NewStorageClassesValueUnknown(), diags
- }
-
- return StorageClassesValue{
- Class: classVal,
- MaxIoPerSec: maxIoPerSecVal,
- MaxThroughInMb: maxThroughInMbVal,
- state: attr.ValueStateKnown,
- }, diags
-}
-
-func NewStorageClassesValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) StorageClassesValue {
- object, diags := NewStorageClassesValue(attributeTypes, attributes)
-
- if diags.HasError() {
- // This could potentially be added to the diag package.
- diagsStrings := make([]string, 0, len(diags))
-
- for _, diagnostic := range diags {
- diagsStrings = append(diagsStrings, fmt.Sprintf(
- "%s | %s | %s",
- diagnostic.Severity(),
- diagnostic.Summary(),
- diagnostic.Detail()))
- }
-
- panic("NewStorageClassesValueMust received error(s): " + strings.Join(diagsStrings, "\n"))
- }
-
- return object
-}
-
-func (t StorageClassesType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) {
- if in.Type() == nil {
- return NewStorageClassesValueNull(), nil
- }
-
- if !in.Type().Equal(t.TerraformType(ctx)) {
- return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type())
- }
-
- if !in.IsKnown() {
- return NewStorageClassesValueUnknown(), nil
- }
-
- if in.IsNull() {
- return NewStorageClassesValueNull(), nil
- }
-
- attributes := map[string]attr.Value{}
-
- val := map[string]tftypes.Value{}
-
- err := in.As(&val)
-
- if err != nil {
- return nil, err
- }
-
- for k, v := range val {
- a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v)
-
- if err != nil {
- return nil, err
- }
-
- attributes[k] = a
- }
-
- return NewStorageClassesValueMust(StorageClassesValue{}.AttributeTypes(ctx), attributes), nil
-}
-
-func (t StorageClassesType) ValueType(ctx context.Context) attr.Value {
- return StorageClassesValue{}
-}
-
-var _ basetypes.ObjectValuable = StorageClassesValue{}
-
-type StorageClassesValue struct {
- Class basetypes.StringValue `tfsdk:"class"`
- MaxIoPerSec basetypes.Int64Value `tfsdk:"max_io_per_sec"`
- MaxThroughInMb basetypes.Int64Value `tfsdk:"max_through_in_mb"`
- state attr.ValueState
-}
-
-func (v StorageClassesValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
- attrTypes := make(map[string]tftypes.Type, 3)
-
- var val tftypes.Value
- var err error
-
- attrTypes["class"] = basetypes.StringType{}.TerraformType(ctx)
- attrTypes["max_io_per_sec"] = basetypes.Int64Type{}.TerraformType(ctx)
- attrTypes["max_through_in_mb"] = basetypes.Int64Type{}.TerraformType(ctx)
-
- objectType := tftypes.Object{AttributeTypes: attrTypes}
-
- switch v.state {
- case attr.ValueStateKnown:
- vals := make(map[string]tftypes.Value, 3)
-
- val, err = v.Class.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["class"] = val
-
- val, err = v.MaxIoPerSec.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["max_io_per_sec"] = val
-
- val, err = v.MaxThroughInMb.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["max_through_in_mb"] = val
-
- if err := tftypes.ValidateValue(objectType, vals); err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- return tftypes.NewValue(objectType, vals), nil
- case attr.ValueStateNull:
- return tftypes.NewValue(objectType, nil), nil
- case attr.ValueStateUnknown:
- return tftypes.NewValue(objectType, tftypes.UnknownValue), nil
- default:
- panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state))
- }
-}
-
-func (v StorageClassesValue) IsNull() bool {
- return v.state == attr.ValueStateNull
-}
-
-func (v StorageClassesValue) IsUnknown() bool {
- return v.state == attr.ValueStateUnknown
-}
-
-func (v StorageClassesValue) String() string {
- return "StorageClassesValue"
-}
-
-func (v StorageClassesValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- attributeTypes := map[string]attr.Type{
- "class": basetypes.StringType{},
- "max_io_per_sec": basetypes.Int64Type{},
- "max_through_in_mb": basetypes.Int64Type{},
- }
-
- if v.IsNull() {
- return types.ObjectNull(attributeTypes), diags
- }
-
- if v.IsUnknown() {
- return types.ObjectUnknown(attributeTypes), diags
- }
-
- objVal, diags := types.ObjectValue(
- attributeTypes,
- map[string]attr.Value{
- "class": v.Class,
- "max_io_per_sec": v.MaxIoPerSec,
- "max_through_in_mb": v.MaxThroughInMb,
- })
-
- return objVal, diags
-}
-
-func (v StorageClassesValue) Equal(o attr.Value) bool {
- other, ok := o.(StorageClassesValue)
-
- if !ok {
- return false
- }
-
- if v.state != other.state {
- return false
- }
-
- if v.state != attr.ValueStateKnown {
- return true
- }
-
- if !v.Class.Equal(other.Class) {
- return false
- }
-
- if !v.MaxIoPerSec.Equal(other.MaxIoPerSec) {
- return false
- }
-
- if !v.MaxThroughInMb.Equal(other.MaxThroughInMb) {
- return false
- }
-
- return true
-}
-
-func (v StorageClassesValue) Type(ctx context.Context) attr.Type {
- return StorageClassesType{
- basetypes.ObjectType{
- AttrTypes: v.AttributeTypes(ctx),
- },
- }
-}
-
-func (v StorageClassesValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
- return map[string]attr.Type{
- "class": basetypes.StringType{},
- "max_io_per_sec": basetypes.Int64Type{},
- "max_through_in_mb": basetypes.Int64Type{},
- }
-}
-
-var _ basetypes.ObjectTypable = PaginationType{}
-
-type PaginationType struct {
- basetypes.ObjectType
-}
-
-func (t PaginationType) Equal(o attr.Type) bool {
- other, ok := o.(PaginationType)
-
- if !ok {
- return false
- }
-
- return t.ObjectType.Equal(other.ObjectType)
-}
-
-func (t PaginationType) String() string {
- return "PaginationType"
-}
-
-func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- attributes := in.Attributes()
-
- pageAttribute, ok := attributes["page"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `page is missing from object`)
-
- return nil, diags
- }
-
- pageVal, ok := pageAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute))
- }
-
- sizeAttribute, ok := attributes["size"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `size is missing from object`)
-
- return nil, diags
- }
-
- sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
- }
-
- sortAttribute, ok := attributes["sort"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `sort is missing from object`)
-
- return nil, diags
- }
-
- sortVal, ok := sortAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`sort expected to be basetypes.StringValue, was: %T`, sortAttribute))
- }
-
- totalPagesAttribute, ok := attributes["total_pages"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `total_pages is missing from object`)
-
- return nil, diags
- }
-
- totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute))
- }
-
- totalRowsAttribute, ok := attributes["total_rows"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `total_rows is missing from object`)
-
- return nil, diags
- }
-
- totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute))
- }
-
- if diags.HasError() {
- return nil, diags
- }
-
- return PaginationValue{
- Page: pageVal,
- Size: sizeVal,
- Sort: sortVal,
- TotalPages: totalPagesVal,
- TotalRows: totalRowsVal,
- state: attr.ValueStateKnown,
- }, diags
-}
-
-func NewPaginationValueNull() PaginationValue {
- return PaginationValue{
- state: attr.ValueStateNull,
- }
-}
-
-func NewPaginationValueUnknown() PaginationValue {
- return PaginationValue{
- state: attr.ValueStateUnknown,
- }
-}
-
-func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (PaginationValue, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521
- ctx := context.Background()
-
- for name, attributeType := range attributeTypes {
- attribute, ok := attributes[name]
-
- if !ok {
- diags.AddError(
- "Missing PaginationValue Attribute Value",
- "While creating a PaginationValue value, a missing attribute value was detected. "+
- "A PaginationValue must contain values for all attributes, even if null or unknown. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("PaginationValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()),
- )
-
- continue
- }
-
- if !attributeType.Equal(attribute.Type(ctx)) {
- diags.AddError(
- "Invalid PaginationValue Attribute Type",
- "While creating a PaginationValue value, an invalid attribute value was detected. "+
- "A PaginationValue must use a matching attribute type for the value. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("PaginationValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+
- fmt.Sprintf("PaginationValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)),
- )
- }
- }
-
- for name := range attributes {
- _, ok := attributeTypes[name]
-
- if !ok {
- diags.AddError(
- "Extra PaginationValue Attribute Value",
- "While creating a PaginationValue value, an extra attribute value was detected. "+
- "A PaginationValue must not contain values beyond the expected attribute types. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("Extra PaginationValue Attribute Name: %s", name),
- )
- }
- }
-
- if diags.HasError() {
- return NewPaginationValueUnknown(), diags
- }
-
- pageAttribute, ok := attributes["page"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `page is missing from object`)
-
- return NewPaginationValueUnknown(), diags
- }
-
- pageVal, ok := pageAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute))
- }
-
- sizeAttribute, ok := attributes["size"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `size is missing from object`)
-
- return NewPaginationValueUnknown(), diags
- }
-
- sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
- }
-
- sortAttribute, ok := attributes["sort"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `sort is missing from object`)
-
- return NewPaginationValueUnknown(), diags
- }
-
- sortVal, ok := sortAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`sort expected to be basetypes.StringValue, was: %T`, sortAttribute))
- }
-
- totalPagesAttribute, ok := attributes["total_pages"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `total_pages is missing from object`)
-
- return NewPaginationValueUnknown(), diags
- }
-
- totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute))
- }
-
- totalRowsAttribute, ok := attributes["total_rows"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `total_rows is missing from object`)
-
- return NewPaginationValueUnknown(), diags
- }
-
- totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute))
- }
-
- if diags.HasError() {
- return NewPaginationValueUnknown(), diags
- }
-
- return PaginationValue{
- Page: pageVal,
- Size: sizeVal,
- Sort: sortVal,
- TotalPages: totalPagesVal,
- TotalRows: totalRowsVal,
- state: attr.ValueStateKnown,
- }, diags
-}
-
-func NewPaginationValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) PaginationValue {
- object, diags := NewPaginationValue(attributeTypes, attributes)
-
- if diags.HasError() {
- // This could potentially be added to the diag package.
- diagsStrings := make([]string, 0, len(diags))
-
- for _, diagnostic := range diags {
- diagsStrings = append(diagsStrings, fmt.Sprintf(
- "%s | %s | %s",
- diagnostic.Severity(),
- diagnostic.Summary(),
- diagnostic.Detail()))
- }
-
- panic("NewPaginationValueMust received error(s): " + strings.Join(diagsStrings, "\n"))
- }
-
- return object
-}
-
-func (t PaginationType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) {
- if in.Type() == nil {
- return NewPaginationValueNull(), nil
- }
-
- if !in.Type().Equal(t.TerraformType(ctx)) {
- return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type())
- }
-
- if !in.IsKnown() {
- return NewPaginationValueUnknown(), nil
- }
-
- if in.IsNull() {
- return NewPaginationValueNull(), nil
- }
-
- attributes := map[string]attr.Value{}
-
- val := map[string]tftypes.Value{}
-
- err := in.As(&val)
-
- if err != nil {
- return nil, err
- }
-
- for k, v := range val {
- a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v)
-
- if err != nil {
- return nil, err
- }
-
- attributes[k] = a
- }
-
- return NewPaginationValueMust(PaginationValue{}.AttributeTypes(ctx), attributes), nil
-}
-
-func (t PaginationType) ValueType(ctx context.Context) attr.Value {
- return PaginationValue{}
-}
-
-var _ basetypes.ObjectValuable = PaginationValue{}
-
-type PaginationValue struct {
- Page basetypes.Int64Value `tfsdk:"page"`
- Size basetypes.Int64Value `tfsdk:"size"`
- Sort basetypes.StringValue `tfsdk:"sort"`
- TotalPages basetypes.Int64Value `tfsdk:"total_pages"`
- TotalRows basetypes.Int64Value `tfsdk:"total_rows"`
- state attr.ValueState
-}
-
-func (v PaginationValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
- attrTypes := make(map[string]tftypes.Type, 5)
-
- var val tftypes.Value
- var err error
-
- attrTypes["page"] = basetypes.Int64Type{}.TerraformType(ctx)
- attrTypes["size"] = basetypes.Int64Type{}.TerraformType(ctx)
- attrTypes["sort"] = basetypes.StringType{}.TerraformType(ctx)
- attrTypes["total_pages"] = basetypes.Int64Type{}.TerraformType(ctx)
- attrTypes["total_rows"] = basetypes.Int64Type{}.TerraformType(ctx)
-
- objectType := tftypes.Object{AttributeTypes: attrTypes}
-
- switch v.state {
- case attr.ValueStateKnown:
- vals := make(map[string]tftypes.Value, 5)
-
- val, err = v.Page.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["page"] = val
-
- val, err = v.Size.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["size"] = val
-
- val, err = v.Sort.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["sort"] = val
-
- val, err = v.TotalPages.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["total_pages"] = val
-
- val, err = v.TotalRows.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["total_rows"] = val
-
- if err := tftypes.ValidateValue(objectType, vals); err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- return tftypes.NewValue(objectType, vals), nil
- case attr.ValueStateNull:
- return tftypes.NewValue(objectType, nil), nil
- case attr.ValueStateUnknown:
- return tftypes.NewValue(objectType, tftypes.UnknownValue), nil
- default:
- panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state))
- }
-}
-
-func (v PaginationValue) IsNull() bool {
- return v.state == attr.ValueStateNull
-}
-
-func (v PaginationValue) IsUnknown() bool {
- return v.state == attr.ValueStateUnknown
-}
-
-func (v PaginationValue) String() string {
- return "PaginationValue"
-}
-
-func (v PaginationValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- attributeTypes := map[string]attr.Type{
- "page": basetypes.Int64Type{},
- "size": basetypes.Int64Type{},
- "sort": basetypes.StringType{},
- "total_pages": basetypes.Int64Type{},
- "total_rows": basetypes.Int64Type{},
- }
-
- if v.IsNull() {
- return types.ObjectNull(attributeTypes), diags
- }
-
- if v.IsUnknown() {
- return types.ObjectUnknown(attributeTypes), diags
- }
-
- objVal, diags := types.ObjectValue(
- attributeTypes,
- map[string]attr.Value{
- "page": v.Page,
- "size": v.Size,
- "sort": v.Sort,
- "total_pages": v.TotalPages,
- "total_rows": v.TotalRows,
- })
-
- return objVal, diags
-}
-
-func (v PaginationValue) Equal(o attr.Value) bool {
- other, ok := o.(PaginationValue)
-
- if !ok {
- return false
- }
-
- if v.state != other.state {
- return false
- }
-
- if v.state != attr.ValueStateKnown {
- return true
- }
-
- if !v.Page.Equal(other.Page) {
- return false
- }
-
- if !v.Size.Equal(other.Size) {
- return false
- }
-
- if !v.Sort.Equal(other.Sort) {
- return false
- }
-
- if !v.TotalPages.Equal(other.TotalPages) {
- return false
- }
-
- if !v.TotalRows.Equal(other.TotalRows) {
- return false
- }
-
- return true
-}
-
-func (v PaginationValue) Type(ctx context.Context) attr.Type {
- return PaginationType{
- basetypes.ObjectType{
- AttrTypes: v.AttributeTypes(ctx),
- },
- }
-}
-
-func (v PaginationValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
- return map[string]attr.Type{
- "page": basetypes.Int64Type{},
- "size": basetypes.Int64Type{},
- "sort": basetypes.StringType{},
- "total_pages": basetypes.Int64Type{},
- "total_rows": basetypes.Int64Type{},
- }
-}
diff --git a/stackit/internal/services/sqlserverflexbeta/flavor/functions.go b/stackit/internal/services/sqlserverflexbeta/flavor/functions.go
deleted file mode 100644
index 8c06da73..00000000
--- a/stackit/internal/services/sqlserverflexbeta/flavor/functions.go
+++ /dev/null
@@ -1,65 +0,0 @@
-package sqlserverFlexBetaFlavor
-
-import (
- "context"
- "fmt"
-
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta"
-)
-
-type flavorsClientReader interface {
- GetFlavorsRequest(
- ctx context.Context,
- projectId, region string,
- ) sqlserverflexbeta.ApiGetFlavorsRequestRequest
-}
-
-func getAllFlavors(ctx context.Context, client flavorsClientReader, projectId, region string) (
- []sqlserverflexbeta.ListFlavors,
- error,
-) {
- getAllFilter := func(_ sqlserverflexbeta.ListFlavors) bool { return true }
- flavorList, err := getFlavorsByFilter(ctx, client, projectId, region, getAllFilter)
- if err != nil {
- return nil, err
- }
- return flavorList, nil
-}
-
-// getFlavorsByFilter is a helper function to retrieve flavors using a filtern function.
-// Hint: The API does not have a GetFlavors endpoint, only ListFlavors
-func getFlavorsByFilter(
- ctx context.Context,
- client flavorsClientReader,
- projectId, region string,
- filter func(db sqlserverflexbeta.ListFlavors) bool,
-) ([]sqlserverflexbeta.ListFlavors, error) {
- if projectId == "" || region == "" {
- return nil, fmt.Errorf("listing sqlserverflexbeta flavors: projectId and region are required")
- }
-
- const pageSize = 25
-
- var result = make([]sqlserverflexbeta.ListFlavors, 0)
-
- for page := int64(1); ; page++ {
- res, err := client.GetFlavorsRequest(ctx, projectId, region).
- Page(page).Size(pageSize).Sort(sqlserverflexbeta.FLAVORSORT_INDEX_ASC).Execute()
- if err != nil {
- return nil, fmt.Errorf("requesting flavors list (page %d): %w", page, err)
- }
-
- // If the API returns no flavors, we have reached the end of the list.
- if res.Flavors == nil || len(*res.Flavors) == 0 {
- break
- }
-
- for _, flavor := range *res.Flavors {
- if filter(flavor) {
- result = append(result, flavor)
- }
- }
- }
-
- return result, nil
-}
diff --git a/stackit/internal/services/sqlserverflexbeta/flavor/functions_test.go b/stackit/internal/services/sqlserverflexbeta/flavor/functions_test.go
deleted file mode 100644
index fb666253..00000000
--- a/stackit/internal/services/sqlserverflexbeta/flavor/functions_test.go
+++ /dev/null
@@ -1,135 +0,0 @@
-package sqlserverFlexBetaFlavor
-
-import (
- "context"
- "testing"
-
- "github.com/stackitcloud/stackit-sdk-go/core/utils"
-
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta"
-)
-
-type mockRequest struct {
- executeFunc func() (*sqlserverflexbeta.GetFlavorsResponse, error)
-}
-
-func (m *mockRequest) Page(_ int64) sqlserverflexbeta.ApiGetFlavorsRequestRequest { return m }
-func (m *mockRequest) Size(_ int64) sqlserverflexbeta.ApiGetFlavorsRequestRequest { return m }
-func (m *mockRequest) Sort(_ sqlserverflexbeta.FlavorSort) sqlserverflexbeta.ApiGetFlavorsRequestRequest {
- return m
-}
-func (m *mockRequest) Execute() (*sqlserverflexbeta.GetFlavorsResponse, error) {
- return m.executeFunc()
-}
-
-type mockFlavorsClient struct {
- executeRequest func() sqlserverflexbeta.ApiGetFlavorsRequestRequest
-}
-
-func (m *mockFlavorsClient) GetFlavorsRequest(_ context.Context, _, _ string) sqlserverflexbeta.ApiGetFlavorsRequestRequest {
- return m.executeRequest()
-}
-
-var mockResp = func(page int64) (*sqlserverflexbeta.GetFlavorsResponse, error) {
- if page == 1 {
- return &sqlserverflexbeta.GetFlavorsResponse{
- Flavors: &[]sqlserverflexbeta.ListFlavors{
- {Id: utils.Ptr("flavor-1"), Description: utils.Ptr("first")},
- {Id: utils.Ptr("flavor-2"), Description: utils.Ptr("second")},
- },
- }, nil
- }
- if page == 2 {
- return &sqlserverflexbeta.GetFlavorsResponse{
- Flavors: &[]sqlserverflexbeta.ListFlavors{
- {Id: utils.Ptr("flavor-3"), Description: utils.Ptr("three")},
- },
- }, nil
- }
-
- return &sqlserverflexbeta.GetFlavorsResponse{
- Flavors: &[]sqlserverflexbeta.ListFlavors{},
- }, nil
-}
-
-func TestGetFlavorsByFilter(t *testing.T) {
- tests := []struct {
- description string
- projectId string
- region string
- mockErr error
- filter func(sqlserverflexbeta.ListFlavors) bool
- wantCount int
- wantErr bool
- }{
- {
- description: "Success - Get all flavors (2 pages)",
- projectId: "pid", region: "reg",
- filter: func(_ sqlserverflexbeta.ListFlavors) bool { return true },
- wantCount: 3,
- wantErr: false,
- },
- {
- description: "Success - Filter flavors by description",
- projectId: "pid", region: "reg",
- filter: func(f sqlserverflexbeta.ListFlavors) bool { return *f.Description == "first" },
- wantCount: 1,
- wantErr: false,
- },
- {
- description: "Error - Missing parameters",
- projectId: "", region: "reg",
- wantErr: true,
- },
- }
-
- for _, tt := range tests {
- t.Run(
- tt.description, func(t *testing.T) {
- var currentPage int64
- client := &mockFlavorsClient{
- executeRequest: func() sqlserverflexbeta.ApiGetFlavorsRequestRequest {
- return &mockRequest{
- executeFunc: func() (*sqlserverflexbeta.GetFlavorsResponse, error) {
- currentPage++
- return mockResp(currentPage)
- },
- }
- },
- }
- actual, err := getFlavorsByFilter(context.Background(), client, tt.projectId, tt.region, tt.filter)
-
- if (err != nil) != tt.wantErr {
- t.Errorf("getFlavorsByFilter() error = %v, wantErr %v", err, tt.wantErr)
- return
- }
-
- if !tt.wantErr && len(actual) != tt.wantCount {
- t.Errorf("getFlavorsByFilter() got %d flavors, want %d", len(actual), tt.wantCount)
- }
- },
- )
- }
-}
-
-func TestGetAllFlavors(t *testing.T) {
- var currentPage int64
- client := &mockFlavorsClient{
- executeRequest: func() sqlserverflexbeta.ApiGetFlavorsRequestRequest {
- return &mockRequest{
- executeFunc: func() (*sqlserverflexbeta.GetFlavorsResponse, error) {
- currentPage++
- return mockResp(currentPage)
- },
- }
- },
- }
-
- res, err := getAllFlavors(context.Background(), client, "pid", "reg")
- if err != nil {
- t.Errorf("getAllFlavors() unexpected error: %v", err)
- }
- if len(res) != 3 {
- t.Errorf("getAllFlavors() expected 3 flavor, got %d", len(res))
- }
-}
diff --git a/stackit/internal/services/sqlserverflexbeta/flavors/datasource.go b/stackit/internal/services/sqlserverflexbeta/flavors/datasource.go
deleted file mode 100644
index b6be1dd4..00000000
--- a/stackit/internal/services/sqlserverflexbeta/flavors/datasource.go
+++ /dev/null
@@ -1,156 +0,0 @@
-package sqlserverflexbeta
-
-import (
- "context"
- "fmt"
- "net/http"
-
- "github.com/hashicorp/terraform-plugin-framework/datasource"
- "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/types"
- "github.com/hashicorp/terraform-plugin-log/tflog"
- "github.com/stackitcloud/stackit-sdk-go/core/config"
-
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
-
- sqlserverflexbetaPkg "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta"
-
- sqlserverflexbetaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/flavors/datasources_gen"
-)
-
-var _ datasource.DataSource = (*flavorsDataSource)(nil)
-
-const errorPrefix = "[Sqlserverflexbeta - Flavors]"
-
-func NewFlavorsDataSource() datasource.DataSource {
- return &flavorsDataSource{}
-}
-
-type dataSourceModel struct {
- sqlserverflexbetaGen.FlavorsModel
- TerraformId types.String `tfsdk:"id"`
-}
-
-type flavorsDataSource struct {
- client *sqlserverflexbetaPkg.APIClient
- providerData core.ProviderData
-}
-
-func (d *flavorsDataSource) Metadata(
- _ context.Context,
- req datasource.MetadataRequest,
- resp *datasource.MetadataResponse,
-) {
- resp.TypeName = req.ProviderTypeName + "_sqlserverflexbeta_flavors"
-}
-
-func (d *flavorsDataSource) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) {
- resp.Schema = sqlserverflexbetaGen.FlavorsDataSourceSchema(ctx)
- resp.Schema.Attributes["id"] = schema.StringAttribute{
- Computed: true,
- Description: "The terraform internal identifier.",
- MarkdownDescription: "The terraform internal identifier.",
- }
-}
-
-// Configure adds the provider configured client to the data source.
-func (d *flavorsDataSource) Configure(
- ctx context.Context,
- req datasource.ConfigureRequest,
- resp *datasource.ConfigureResponse,
-) {
- var ok bool
- d.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics)
- if !ok {
- return
- }
-
- apiClientConfigOptions := []config.ConfigurationOption{
- config.WithCustomAuth(d.providerData.RoundTripper),
- utils.UserAgentConfigOption(d.providerData.Version),
- }
- if d.providerData.SQLServerFlexCustomEndpoint != "" {
- apiClientConfigOptions = append(
- apiClientConfigOptions,
- config.WithEndpoint(d.providerData.SQLServerFlexCustomEndpoint),
- )
- } else {
- apiClientConfigOptions = append(
- apiClientConfigOptions,
- config.WithRegion(d.providerData.GetRegion()),
- )
- }
- apiClient, err := sqlserverflexbetaPkg.NewAPIClient(apiClientConfigOptions...)
- if err != nil {
- resp.Diagnostics.AddError(
- "Error configuring API client",
- fmt.Sprintf(
- "Configuring client: %v. This is an error related to the provider configuration, not to the resource configuration",
- err,
- ),
- )
- return
- }
- d.client = apiClient
- tflog.Info(ctx, fmt.Sprintf("%s client configured", errorPrefix))
-}
-
-func (d *flavorsDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
- var data dataSourceModel
-
- // Read Terraform configuration data into the model
- resp.Diagnostics.Append(req.Config.Get(ctx, &data)...)
-
- if resp.Diagnostics.HasError() {
- return
- }
-
- ctx = core.InitProviderContext(ctx)
-
- projectId := data.ProjectId.ValueString()
- region := d.providerData.GetRegionWithOverride(data.Region)
- // TODO: implement right identifier for flavors
- flavorsId := data.Flavors
-
- ctx = tflog.SetField(ctx, "project_id", projectId)
- ctx = tflog.SetField(ctx, "region", region)
-
- // TODO: implement needed fields
- ctx = tflog.SetField(ctx, "flavors_id", flavorsId)
-
- // TODO: refactor to correct implementation
- _, err := d.client.GetFlavorsRequest(ctx, projectId, region).Execute()
- if err != nil {
- utils.LogError(
- ctx,
- &resp.Diagnostics,
- err,
- "Reading flavors",
- fmt.Sprintf("flavors with ID %q does not exist in project %q.", flavorsId, projectId),
- map[int]string{
- http.StatusForbidden: fmt.Sprintf("Project with ID %q not found or forbidden access", projectId),
- },
- )
- resp.State.RemoveResource(ctx)
- return
- }
-
- ctx = core.LogResponse(ctx)
-
- // TODO: refactor to correct implementation of internal tf id
- data.TerraformId = utils.BuildInternalTerraformId(projectId, region)
-
- // TODO: fill remaining fields
- // data.Flavors = types.Sometype(apiResponse.GetFlavors())
- // data.Page = types.Sometype(apiResponse.GetPage())
- // data.Pagination = types.Sometype(apiResponse.GetPagination())
- // data.ProjectId = types.Sometype(apiResponse.GetProjectId())
- // data.Region = types.Sometype(apiResponse.GetRegion())
- // data.Size = types.Sometype(apiResponse.GetSize())
- // data.Sort = types.Sometype(apiResponse.GetSort())// Save data into Terraform state
- resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
-
- tflog.Info(ctx, fmt.Sprintf("%s read successful", errorPrefix))
-}
diff --git a/stackit/internal/services/sqlserverflexbeta/instance/datasource.go b/stackit/internal/services/sqlserverflexbeta/instance/datasource.go
deleted file mode 100644
index 2830ac62..00000000
--- a/stackit/internal/services/sqlserverflexbeta/instance/datasource.go
+++ /dev/null
@@ -1,146 +0,0 @@
-package sqlserverflexbeta
-
-import (
- "context"
- "fmt"
- "net/http"
-
- "github.com/hashicorp/terraform-plugin-framework/datasource"
- "github.com/hashicorp/terraform-plugin-framework/types"
- "github.com/hashicorp/terraform-plugin-log/tflog"
- "github.com/stackitcloud/stackit-sdk-go/core/config"
-
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
-
- sqlserverflexbetaPkg "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta"
-
- sqlserverflexbetaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/instance/datasources_gen"
-)
-
-var _ datasource.DataSource = (*instanceDataSource)(nil)
-
-const errorPrefix = "[Sqlserverflexbeta - Instance]"
-
-func NewInstanceDataSource() datasource.DataSource {
- return &instanceDataSource{}
-}
-
-// dataSourceModel maps the data source schema data.
-type dataSourceModel struct {
- sqlserverflexbetaGen.InstanceModel
- TerraformID types.String `tfsdk:"id"`
-}
-
-type instanceDataSource struct {
- client *sqlserverflexbetaPkg.APIClient
- providerData core.ProviderData
-}
-
-func (d *instanceDataSource) Metadata(
- _ context.Context,
- req datasource.MetadataRequest,
- resp *datasource.MetadataResponse,
-) {
- resp.TypeName = req.ProviderTypeName + "_sqlserverflexbeta_instance"
-}
-
-func (d *instanceDataSource) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) {
- resp.Schema = sqlserverflexbetaGen.InstanceDataSourceSchema(ctx)
-}
-
-// Configure adds the provider configured client to the data source.
-func (d *instanceDataSource) Configure(
- ctx context.Context,
- req datasource.ConfigureRequest,
- resp *datasource.ConfigureResponse,
-) {
- var ok bool
- d.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics)
- if !ok {
- return
- }
-
- apiClientConfigOptions := []config.ConfigurationOption{
- config.WithCustomAuth(d.providerData.RoundTripper),
- utils.UserAgentConfigOption(d.providerData.Version),
- }
- if d.providerData.SQLServerFlexCustomEndpoint != "" {
- apiClientConfigOptions = append(
- apiClientConfigOptions,
- config.WithEndpoint(d.providerData.SQLServerFlexCustomEndpoint),
- )
- } else {
- apiClientConfigOptions = append(
- apiClientConfigOptions,
- config.WithRegion(d.providerData.GetRegion()),
- )
- }
- apiClient, err := sqlserverflexbetaPkg.NewAPIClient(apiClientConfigOptions...)
- if err != nil {
- resp.Diagnostics.AddError(
- "Error configuring API client",
- fmt.Sprintf(
- "Configuring client: %v. This is an error related to the provider configuration, not to the resource configuration",
- err,
- ),
- )
- return
- }
- d.client = apiClient
- tflog.Info(ctx, fmt.Sprintf("%s client configured", errorPrefix))
-}
-
-func (d *instanceDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
- var data dataSourceModel
-
- // Read Terraform configuration data into the model
- resp.Diagnostics.Append(req.Config.Get(ctx, &data)...)
-
- if resp.Diagnostics.HasError() {
- return
- }
-
- ctx = core.InitProviderContext(ctx)
-
- projectId := data.ProjectId.ValueString()
- region := d.providerData.GetRegionWithOverride(data.Region)
- instanceId := data.InstanceId.ValueString()
-
- ctx = tflog.SetField(ctx, "project_id", projectId)
- ctx = tflog.SetField(ctx, "region", region)
- ctx = tflog.SetField(ctx, "instance_id", instanceId)
-
- instanceResp, err := d.client.GetInstanceRequest(ctx, projectId, region, instanceId).Execute()
- if err != nil {
- utils.LogError(
- ctx,
- &resp.Diagnostics,
- err,
- "Reading instance",
- fmt.Sprintf("instance with ID %q does not exist in project %q.", instanceId, projectId),
- map[int]string{
- http.StatusForbidden: fmt.Sprintf("Project with ID %q not found or forbidden access", projectId),
- },
- )
- resp.State.RemoveResource(ctx)
- return
- }
-
- ctx = core.LogResponse(ctx)
-
- err = mapDataResponseToModel(ctx, instanceResp, &data, resp.Diagnostics)
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- fmt.Sprintf("%s Read", errorPrefix),
- fmt.Sprintf("Processing API payload: %v", err),
- )
- return
- }
-
- // Save data into Terraform state
- resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
-}
diff --git a/stackit/internal/services/sqlserverflexbeta/instance/functions.go b/stackit/internal/services/sqlserverflexbeta/instance/functions.go
deleted file mode 100644
index 77791ee6..00000000
--- a/stackit/internal/services/sqlserverflexbeta/instance/functions.go
+++ /dev/null
@@ -1,272 +0,0 @@
-package sqlserverflexbeta
-
-import (
- "context"
- "errors"
- "fmt"
- "math"
-
- "github.com/hashicorp/terraform-plugin-framework/attr"
- "github.com/hashicorp/terraform-plugin-framework/diag"
- "github.com/hashicorp/terraform-plugin-framework/resource"
- "github.com/hashicorp/terraform-plugin-framework/types"
-
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
- sqlserverflexbetaDataGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/instance/datasources_gen"
- sqlserverflexbetaResGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/instance/resources_gen"
-)
-
-func mapResponseToModel(
- ctx context.Context,
- resp *sqlserverflexbeta.GetInstanceResponse,
- m *sqlserverflexbetaResGen.InstanceModel,
- tfDiags diag.Diagnostics,
-) error {
- m.BackupSchedule = types.StringValue(resp.GetBackupSchedule())
- m.Edition = types.StringValue(string(resp.GetEdition()))
- m.Encryption = handleEncryption(ctx, m, resp)
- m.FlavorId = types.StringValue(resp.GetFlavorId())
- m.Id = types.StringValue(resp.GetId())
- m.InstanceId = types.StringValue(resp.GetId())
- m.IsDeletable = types.BoolValue(resp.GetIsDeletable())
- m.Name = types.StringValue(resp.GetName())
- netAcl, diags := types.ListValueFrom(ctx, types.StringType, resp.Network.GetAcl())
- tfDiags.Append(diags...)
- if diags.HasError() {
- return fmt.Errorf(
- "error converting network acl response value",
- )
- }
- net, diags := sqlserverflexbetaResGen.NewNetworkValue(
- sqlserverflexbetaResGen.NetworkValue{}.AttributeTypes(ctx),
- map[string]attr.Value{
- "access_scope": types.StringValue(string(resp.Network.GetAccessScope())),
- "acl": netAcl,
- "instance_address": types.StringValue(resp.Network.GetInstanceAddress()),
- "router_address": types.StringValue(resp.Network.GetRouterAddress()),
- },
- )
- tfDiags.Append(diags...)
- if diags.HasError() {
- return errors.New("error converting network response value")
- }
- m.Network = net
- m.Replicas = types.Int64Value(int64(resp.GetReplicas()))
- m.RetentionDays = types.Int64Value(resp.GetRetentionDays())
- m.Status = types.StringValue(string(resp.GetStatus()))
-
- stor, diags := sqlserverflexbetaResGen.NewStorageValue(
- sqlserverflexbetaResGen.StorageValue{}.AttributeTypes(ctx),
- map[string]attr.Value{
- "class": types.StringValue(resp.Storage.GetClass()),
- "size": types.Int64Value(resp.Storage.GetSize()),
- },
- )
- tfDiags.Append(diags...)
- if diags.HasError() {
- return fmt.Errorf("error converting storage response value")
- }
- m.Storage = stor
-
- m.Version = types.StringValue(string(resp.GetVersion()))
- return nil
-}
-
-func mapDataResponseToModel(
- ctx context.Context,
- resp *sqlserverflexbeta.GetInstanceResponse,
- m *dataSourceModel,
- tfDiags diag.Diagnostics,
-) error {
- m.BackupSchedule = types.StringValue(resp.GetBackupSchedule())
- m.Edition = types.StringValue(string(resp.GetEdition()))
- m.Encryption = handleDSEncryption(ctx, m, resp)
- m.FlavorId = types.StringValue(resp.GetFlavorId())
- m.Id = types.StringValue(resp.GetId())
- m.InstanceId = types.StringValue(resp.GetId())
- m.IsDeletable = types.BoolValue(resp.GetIsDeletable())
- m.Name = types.StringValue(resp.GetName())
- netAcl, diags := types.ListValueFrom(ctx, types.StringType, resp.Network.GetAcl())
- tfDiags.Append(diags...)
- if diags.HasError() {
- return fmt.Errorf(
- "error converting network acl response value",
- )
- }
- net, diags := sqlserverflexbetaDataGen.NewNetworkValue(
- sqlserverflexbetaDataGen.NetworkValue{}.AttributeTypes(ctx),
- map[string]attr.Value{
- "access_scope": types.StringValue(string(resp.Network.GetAccessScope())),
- "acl": netAcl,
- "instance_address": types.StringValue(resp.Network.GetInstanceAddress()),
- "router_address": types.StringValue(resp.Network.GetRouterAddress()),
- },
- )
- tfDiags.Append(diags...)
- if diags.HasError() {
- return errors.New("error converting network response value")
- }
- m.Network = net
- m.Replicas = types.Int64Value(int64(resp.GetReplicas()))
- m.RetentionDays = types.Int64Value(resp.GetRetentionDays())
- m.Status = types.StringValue(string(resp.GetStatus()))
-
- stor, diags := sqlserverflexbetaDataGen.NewStorageValue(
- sqlserverflexbetaDataGen.StorageValue{}.AttributeTypes(ctx),
- map[string]attr.Value{
- "class": types.StringValue(resp.Storage.GetClass()),
- "size": types.Int64Value(resp.Storage.GetSize()),
- },
- )
- tfDiags.Append(diags...)
- if diags.HasError() {
- return fmt.Errorf("error converting storage response value")
- }
- m.Storage = stor
-
- m.Version = types.StringValue(string(resp.GetVersion()))
- return nil
-}
-
-func handleEncryption(
- ctx context.Context,
- m *sqlserverflexbetaResGen.InstanceModel,
- resp *sqlserverflexbeta.GetInstanceResponse,
-) sqlserverflexbetaResGen.EncryptionValue {
- if !resp.HasEncryption() ||
- resp.Encryption == nil ||
- resp.Encryption.KekKeyId == nil ||
- resp.Encryption.KekKeyRingId == nil ||
- resp.Encryption.KekKeyVersion == nil ||
- resp.Encryption.ServiceAccount == nil {
- if m.Encryption.IsNull() || m.Encryption.IsUnknown() {
- return sqlserverflexbetaResGen.NewEncryptionValueNull()
- }
- return m.Encryption
- }
-
- enc := sqlserverflexbetaResGen.NewEncryptionValueMust(
- sqlserverflexbetaResGen.EncryptionValue{}.AttributeTypes(ctx),
- map[string]attr.Value{
- "kek_key_id": types.StringValue(resp.Encryption.GetKekKeyId()),
- "kek_key_ring_id": types.StringValue(resp.Encryption.GetKekKeyRingId()),
- "kek_key_version": types.StringValue(resp.Encryption.GetKekKeyVersion()),
- "service_account": types.StringValue(resp.Encryption.GetServiceAccount()),
- },
- )
- return enc
-}
-
-func handleDSEncryption(
- ctx context.Context,
- m *dataSourceModel,
- resp *sqlserverflexbeta.GetInstanceResponse,
-) sqlserverflexbetaDataGen.EncryptionValue {
- if !resp.HasEncryption() ||
- resp.Encryption == nil ||
- resp.Encryption.KekKeyId == nil ||
- resp.Encryption.KekKeyRingId == nil ||
- resp.Encryption.KekKeyVersion == nil ||
- resp.Encryption.ServiceAccount == nil {
- if m.Encryption.IsNull() || m.Encryption.IsUnknown() {
- return sqlserverflexbetaDataGen.NewEncryptionValueNull()
- }
- return m.Encryption
- }
-
- enc := sqlserverflexbetaDataGen.NewEncryptionValueMust(
- sqlserverflexbetaDataGen.EncryptionValue{}.AttributeTypes(ctx),
- map[string]attr.Value{
- "kek_key_id": types.StringValue(resp.Encryption.GetKekKeyId()),
- "kek_key_ring_id": types.StringValue(resp.Encryption.GetKekKeyRingId()),
- "kek_key_version": types.StringValue(resp.Encryption.GetKekKeyVersion()),
- "service_account": types.StringValue(resp.Encryption.GetServiceAccount()),
- },
- )
- return enc
-}
-
-func toCreatePayload(
- ctx context.Context,
- model *sqlserverflexbetaResGen.InstanceModel,
-) (*sqlserverflexbeta.CreateInstanceRequestPayload, error) {
- if model == nil {
- return nil, fmt.Errorf("nil model")
- }
-
- storagePayload := &sqlserverflexbeta.CreateInstanceRequestPayloadGetStorageArgType{}
- if !model.Storage.IsNull() && !model.Storage.IsUnknown() {
- storagePayload.Class = model.Storage.Class.ValueStringPointer()
- storagePayload.Size = model.Storage.Size.ValueInt64Pointer()
- }
-
- var encryptionPayload *sqlserverflexbeta.CreateInstanceRequestPayloadGetEncryptionArgType = nil
- if !model.Encryption.IsNull() && !model.Encryption.IsUnknown() {
- encryptionPayload = &sqlserverflexbeta.CreateInstanceRequestPayloadGetEncryptionArgType{}
- encryptionPayload.KekKeyId = model.Encryption.KekKeyId.ValueStringPointer()
- encryptionPayload.KekKeyRingId = model.Encryption.KekKeyRingId.ValueStringPointer()
- encryptionPayload.KekKeyVersion = model.Encryption.KekKeyVersion.ValueStringPointer()
- encryptionPayload.ServiceAccount = model.Encryption.ServiceAccount.ValueStringPointer()
- }
-
- networkPayload := &sqlserverflexbeta.CreateInstanceRequestPayloadGetNetworkArgType{}
- if !model.Network.IsNull() && !model.Network.IsUnknown() {
- networkPayload.AccessScope = sqlserverflexbeta.CreateInstanceRequestPayloadNetworkGetAccessScopeAttributeType(
- model.Network.AccessScope.ValueStringPointer(),
- )
-
- var resList []string
- diags := model.Network.Acl.ElementsAs(ctx, &resList, false)
- if diags.HasError() {
- return nil, fmt.Errorf("error converting network acl list")
- }
- networkPayload.Acl = &resList
- }
-
- return &sqlserverflexbeta.CreateInstanceRequestPayload{
- BackupSchedule: conversion.StringValueToPointer(model.BackupSchedule),
- Encryption: encryptionPayload,
- FlavorId: conversion.StringValueToPointer(model.FlavorId),
- Name: conversion.StringValueToPointer(model.Name),
- Network: networkPayload,
- RetentionDays: conversion.Int64ValueToPointer(model.RetentionDays),
- Storage: storagePayload,
- Version: sqlserverflexbeta.CreateInstanceRequestPayloadGetVersionAttributeType(
- conversion.StringValueToPointer(model.Version),
- ),
- }, nil
-}
-
-func toUpdatePayload(
- ctx context.Context,
- m *sqlserverflexbetaResGen.InstanceModel,
- resp *resource.UpdateResponse,
-) (*sqlserverflexbeta.UpdateInstanceRequestPayload, error) {
- if m == nil {
- return nil, fmt.Errorf("nil model")
- }
- if m.Replicas.ValueInt64() > math.MaxUint32 {
- return nil, fmt.Errorf("replicas value is too big for uint32")
- }
- replVal := sqlserverflexbeta.Replicas(uint32(m.Replicas.ValueInt64())) // nolint:gosec // check is performed above
-
- var netAcl []string
- diags := m.Network.Acl.ElementsAs(ctx, &netAcl, false)
- resp.Diagnostics.Append(diags...)
- if diags.HasError() {
- return nil, fmt.Errorf("error converting model network acl value")
- }
- return &sqlserverflexbeta.UpdateInstanceRequestPayload{
- BackupSchedule: m.BackupSchedule.ValueStringPointer(),
- FlavorId: m.FlavorId.ValueStringPointer(),
- Name: m.Name.ValueStringPointer(),
- Network: sqlserverflexbeta.NewUpdateInstanceRequestPayloadNetwork(netAcl),
- Replicas: &replVal,
- RetentionDays: m.RetentionDays.ValueInt64Pointer(),
- Storage: &sqlserverflexbeta.StorageUpdate{Size: m.Storage.Size.ValueInt64Pointer()},
- Version: sqlserverflexbeta.UpdateInstanceRequestPayloadGetVersionAttributeType(
- m.Version.ValueStringPointer(),
- ),
- }, nil
-}
diff --git a/stackit/internal/services/sqlserverflexbeta/instance/functions_test.go b/stackit/internal/services/sqlserverflexbeta/instance/functions_test.go
deleted file mode 100644
index e9728b80..00000000
--- a/stackit/internal/services/sqlserverflexbeta/instance/functions_test.go
+++ /dev/null
@@ -1,283 +0,0 @@
-package sqlserverflexbeta
-
-import (
- "context"
- "reflect"
- "testing"
-
- "github.com/google/go-cmp/cmp"
- "github.com/hashicorp/terraform-plugin-framework/attr"
- "github.com/hashicorp/terraform-plugin-framework/diag"
- "github.com/hashicorp/terraform-plugin-framework/resource"
- "github.com/hashicorp/terraform-plugin-framework/types"
- "github.com/stackitcloud/stackit-sdk-go/core/utils"
-
- sqlserverflexbetaPkgGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta"
- sqlserverflexbetaRs "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/instance/resources_gen"
-)
-
-func Test_handleDSEncryption(t *testing.T) {
- type args struct {
- m *dataSourceModel
- resp *sqlserverflexbetaPkgGen.GetInstanceResponse
- }
- tests := []struct {
- name string
- args args
- want sqlserverflexbetaRs.EncryptionValue
- }{
- // TODO: Add test cases.
- }
- for _, tt := range tests {
- t.Run(
- tt.name, func(t *testing.T) {
- if got := handleDSEncryption(t.Context(), tt.args.m, tt.args.resp); !reflect.DeepEqual(got, tt.want) {
- t.Errorf("handleDSEncryption() = %v, want %v", got, tt.want)
- }
- },
- )
- }
-}
-
-func Test_handleEncryption(t *testing.T) {
- type args struct {
- m *sqlserverflexbetaRs.InstanceModel
- resp *sqlserverflexbetaPkgGen.GetInstanceResponse
- }
- tests := []struct {
- name string
- args args
- want sqlserverflexbetaRs.EncryptionValue
- }{
- {
- name: "nil response",
- args: args{
- m: &sqlserverflexbetaRs.InstanceModel{},
- resp: &sqlserverflexbetaPkgGen.GetInstanceResponse{},
- },
- want: sqlserverflexbetaRs.EncryptionValue{},
- },
- {
- name: "nil response",
- args: args{
- m: &sqlserverflexbetaRs.InstanceModel{},
- resp: &sqlserverflexbetaPkgGen.GetInstanceResponse{
- Encryption: &sqlserverflexbetaPkgGen.InstanceEncryption{},
- },
- },
- want: sqlserverflexbetaRs.NewEncryptionValueNull(),
- },
- {
- name: "response with values",
- args: args{
- m: &sqlserverflexbetaRs.InstanceModel{},
- resp: &sqlserverflexbetaPkgGen.GetInstanceResponse{
- Encryption: &sqlserverflexbetaPkgGen.InstanceEncryption{
- KekKeyId: utils.Ptr("kek_key_id"),
- KekKeyRingId: utils.Ptr("kek_key_ring_id"),
- KekKeyVersion: utils.Ptr("kek_key_version"),
- ServiceAccount: utils.Ptr("kek_svc_acc"),
- },
- },
- },
- want: sqlserverflexbetaRs.NewEncryptionValueMust(
- sqlserverflexbetaRs.EncryptionValue{}.AttributeTypes(context.TODO()),
- map[string]attr.Value{
- "kek_key_id": types.StringValue("kek_key_id"),
- "kek_key_ring_id": types.StringValue("kek_key_ring_id"),
- "kek_key_version": types.StringValue("kek_key_version"),
- "service_account": types.StringValue("kek_svc_acc"),
- },
- ),
- },
- }
- for _, tt := range tests {
- t.Run(
- tt.name, func(t *testing.T) {
- got := handleEncryption(t.Context(), tt.args.m, tt.args.resp)
-
- diff := cmp.Diff(tt.want, got)
- if diff != "" {
- t.Fatalf("Data does not match: %s", diff)
- }
-
- //if !reflect.DeepEqual(got, tt.want) {
- // t.Errorf("handleEncryption() = %v, want %v", got, tt.want)
- //}
- },
- )
- }
-}
-
-func Test_mapDataResponseToModel(t *testing.T) {
- type args struct {
- ctx context.Context
- resp *sqlserverflexbetaPkgGen.GetInstanceResponse
- m *dataSourceModel
- tfDiags diag.Diagnostics
- }
- tests := []struct {
- name string
- args args
- wantErr bool
- }{
- // TODO: Add test cases.
- }
- for _, tt := range tests {
- t.Run(
- tt.name, func(t *testing.T) {
- if err := mapDataResponseToModel(
- tt.args.ctx,
- tt.args.resp,
- tt.args.m,
- tt.args.tfDiags,
- ); (err != nil) != tt.wantErr {
- t.Errorf("mapDataResponseToModel() error = %v, wantErr %v", err, tt.wantErr)
- }
- },
- )
- }
-}
-
-func Test_mapResponseToModel(t *testing.T) {
- type args struct {
- ctx context.Context
- resp *sqlserverflexbetaPkgGen.GetInstanceResponse
- m *sqlserverflexbetaRs.InstanceModel
- tfDiags diag.Diagnostics
- }
- tests := []struct {
- name string
- args args
- wantErr bool
- }{
- // TODO: Add test cases.
- }
- for _, tt := range tests {
- t.Run(
- tt.name, func(t *testing.T) {
- if err := mapResponseToModel(
- tt.args.ctx,
- tt.args.resp,
- tt.args.m,
- tt.args.tfDiags,
- ); (err != nil) != tt.wantErr {
- t.Errorf("mapResponseToModel() error = %v, wantErr %v", err, tt.wantErr)
- }
- },
- )
- }
-}
-
-func Test_toCreatePayload(t *testing.T) {
- type args struct {
- ctx context.Context
- model *sqlserverflexbetaRs.InstanceModel
- }
- tests := []struct {
- name string
- args args
- want *sqlserverflexbetaPkgGen.CreateInstanceRequestPayload
- wantErr bool
- }{
- {
- name: "simple",
- args: args{
- ctx: context.Background(),
- model: &sqlserverflexbetaRs.InstanceModel{
- Encryption: sqlserverflexbetaRs.NewEncryptionValueMust(
- sqlserverflexbetaRs.EncryptionValue{}.AttributeTypes(context.Background()),
- map[string]attr.Value{
- "kek_key_id": types.StringValue("kek_key_id"),
- "kek_key_ring_id": types.StringValue("kek_key_ring_id"),
- "kek_key_version": types.StringValue("kek_key_version"),
- "service_account": types.StringValue("sacc"),
- },
- ),
- Storage: sqlserverflexbetaRs.StorageValue{},
- },
- },
- want: &sqlserverflexbetaPkgGen.CreateInstanceRequestPayload{
- BackupSchedule: nil,
- Encryption: &sqlserverflexbetaPkgGen.InstanceEncryption{
- KekKeyId: utils.Ptr("kek_key_id"),
- KekKeyRingId: utils.Ptr("kek_key_ring_id"),
- KekKeyVersion: utils.Ptr("kek_key_version"),
- ServiceAccount: utils.Ptr("sacc"),
- },
- FlavorId: nil,
- Name: nil,
- Network: &sqlserverflexbetaPkgGen.CreateInstanceRequestPayloadNetwork{},
- RetentionDays: nil,
- Storage: &sqlserverflexbetaPkgGen.CreateInstanceRequestPayloadGetStorageArgType{},
- Version: nil,
- },
- wantErr: false,
- },
- {
- name: "nil object",
- args: args{
- ctx: context.Background(),
- model: &sqlserverflexbetaRs.InstanceModel{
- Encryption: sqlserverflexbetaRs.NewEncryptionValueNull(),
- Storage: sqlserverflexbetaRs.StorageValue{},
- },
- },
- want: &sqlserverflexbetaPkgGen.CreateInstanceRequestPayload{
- BackupSchedule: nil,
- Encryption: nil,
- FlavorId: nil,
- Name: nil,
- Network: &sqlserverflexbetaPkgGen.CreateInstanceRequestPayloadNetwork{},
- RetentionDays: nil,
- Storage: &sqlserverflexbetaPkgGen.CreateInstanceRequestPayloadGetStorageArgType{},
- Version: nil,
- },
- wantErr: false,
- },
- }
- for _, tt := range tests {
- t.Run(
- tt.name, func(t *testing.T) {
- got, err := toCreatePayload(tt.args.ctx, tt.args.model)
- if (err != nil) != tt.wantErr {
- t.Errorf("toCreatePayload() error = %v, wantErr %v", err, tt.wantErr)
- return
- }
- if diff := cmp.Diff(tt.want, got); diff != "" {
- t.Errorf("model mismatch (-want +got):\n%s", diff)
- }
- },
- )
- }
-}
-
-func Test_toUpdatePayload(t *testing.T) {
- type args struct {
- ctx context.Context
- m *sqlserverflexbetaRs.InstanceModel
- resp *resource.UpdateResponse
- }
- tests := []struct {
- name string
- args args
- want *sqlserverflexbetaPkgGen.UpdateInstanceRequestPayload
- wantErr bool
- }{
- // TODO: Add test cases.
- }
- for _, tt := range tests {
- t.Run(
- tt.name, func(t *testing.T) {
- got, err := toUpdatePayload(tt.args.ctx, tt.args.m, tt.args.resp)
- if (err != nil) != tt.wantErr {
- t.Errorf("toUpdatePayload() error = %v, wantErr %v", err, tt.wantErr)
- return
- }
- if !reflect.DeepEqual(got, tt.want) {
- t.Errorf("toUpdatePayload() got = %v, want %v", got, tt.want)
- }
- },
- )
- }
-}
diff --git a/stackit/internal/services/sqlserverflexbeta/instance/planModifiers.yaml b/stackit/internal/services/sqlserverflexbeta/instance/planModifiers.yaml
deleted file mode 100644
index 71d4cbe4..00000000
--- a/stackit/internal/services/sqlserverflexbeta/instance/planModifiers.yaml
+++ /dev/null
@@ -1,124 +0,0 @@
-fields:
- - name: 'id'
- modifiers:
- - 'UseStateForUnknown'
-
- - name: 'instance_id'
- validators:
- - validate.NoSeparator
- - validate.UUID
- modifiers:
- - 'UseStateForUnknown'
-
- - name: 'project_id'
- validators:
- - validate.NoSeparator
- - validate.UUID
- modifiers:
- - 'UseStateForUnknown'
- - 'RequiresReplace'
-
- - name: 'name'
- modifiers:
- - 'UseStateForUnknown'
-
- - name: 'backup_schedule'
- modifiers:
- - 'UseStateForUnknown'
-
- - name: 'encryption.kek_key_id'
- validators:
- - validate.NoSeparator
- modifiers:
- - 'UseStateForUnknown'
- - 'RequiresReplace'
-
- - name: 'encryption.kek_key_version'
- validators:
- - validate.NoSeparator
- modifiers:
- - 'UseStateForUnknown'
- - 'RequiresReplace'
-
- - name: 'encryption.kek_key_ring_id'
- validators:
- - validate.NoSeparator
- modifiers:
- - 'UseStateForUnknown'
- - 'RequiresReplace'
-
- - name: 'encryption.service_account'
- validators:
- - validate.NoSeparator
- modifiers:
- - 'UseStateForUnknown'
- - 'RequiresReplace'
-
- - name: 'network.access_scope'
- validators:
- - validate.NoSeparator
- modifiers:
- - 'UseStateForUnknown'
- - 'RequiresReplace'
-
- - name: 'network.acl'
- modifiers:
- - 'UseStateForUnknown'
-
- - name: 'network.instance_address'
- modifiers:
- - 'UseStateForUnknown'
-
- - name: 'network.router_address'
- modifiers:
- - 'UseStateForUnknown'
-
- - name: 'status'
- modifiers:
- - 'UseStateForUnknown'
-
- - name: 'region'
- modifiers:
- - 'UseStateForUnknown'
- - 'RequiresReplace'
-
- - name: 'retention_days'
- modifiers:
- - 'UseStateForUnknown'
-
- - name: 'edition'
- modifiers:
- - 'UseStateForUnknown'
- - 'RequiresReplace'
-
- - name: 'version'
- modifiers:
- - 'UseStateForUnknown'
- - 'RequiresReplace'
-
- - name: 'replicas'
- modifiers:
- - 'UseStateForUnknown'
- - 'RequiresReplace'
-
- - name: 'storage'
- modifiers:
- - 'UseStateForUnknown'
-
- - name: 'storage.class'
- modifiers:
- - 'UseStateForUnknown'
- - 'RequiresReplace'
-
- - name: 'storage.size'
- modifiers:
- - 'UseStateForUnknown'
-
- - name: 'flavor_id'
- modifiers:
- - 'UseStateForUnknown'
- - 'RequiresReplace'
-
- - name: 'is_deletable'
- modifiers:
- - 'UseStateForUnknown'
diff --git a/stackit/internal/services/sqlserverflexbeta/instance/resource.go b/stackit/internal/services/sqlserverflexbeta/instance/resource.go
deleted file mode 100644
index 044b4b43..00000000
--- a/stackit/internal/services/sqlserverflexbeta/instance/resource.go
+++ /dev/null
@@ -1,548 +0,0 @@
-package sqlserverflexbeta
-
-import (
- "context"
- _ "embed"
- "fmt"
- "net/http"
- "strings"
- "time"
-
- "github.com/hashicorp/terraform-plugin-framework/path"
- "github.com/hashicorp/terraform-plugin-framework/resource"
- "github.com/hashicorp/terraform-plugin-framework/resource/identityschema"
- "github.com/hashicorp/terraform-plugin-framework/types"
- "github.com/hashicorp/terraform-plugin-log/tflog"
- "github.com/stackitcloud/stackit-sdk-go/core/config"
- "github.com/stackitcloud/stackit-sdk-go/core/oapierror"
-
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
- wait "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/wait/sqlserverflexbeta"
-
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta"
-
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
-
- sqlserverflexbetaResGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/instance/resources_gen"
-)
-
-var (
- _ resource.Resource = &instanceResource{}
- _ resource.ResourceWithConfigure = &instanceResource{}
- _ resource.ResourceWithImportState = &instanceResource{}
- _ resource.ResourceWithModifyPlan = &instanceResource{}
- _ resource.ResourceWithIdentity = &instanceResource{}
-)
-
-func NewInstanceResource() resource.Resource {
- return &instanceResource{}
-}
-
-type instanceResource struct {
- client *sqlserverflexbeta.APIClient
- providerData core.ProviderData
-}
-
-// resourceModel describes the resource data model.
-type resourceModel = sqlserverflexbetaResGen.InstanceModel
-
-type InstanceResourceIdentityModel struct {
- ProjectID types.String `tfsdk:"project_id"`
- Region types.String `tfsdk:"region"`
- InstanceID types.String `tfsdk:"instance_id"`
-}
-
-func (r *instanceResource) Metadata(
- _ context.Context,
- req resource.MetadataRequest,
- resp *resource.MetadataResponse,
-) {
- resp.TypeName = req.ProviderTypeName + "_sqlserverflexbeta_instance"
-}
-
-//go:embed planModifiers.yaml
-var modifiersFileByte []byte
-
-func (r *instanceResource) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) {
- s := sqlserverflexbetaResGen.InstanceResourceSchema(ctx)
-
- fields, err := utils.ReadModifiersConfig(modifiersFileByte)
- if err != nil {
- resp.Diagnostics.AddError("error during read modifiers config file", err.Error())
- return
- }
-
- err = utils.AddPlanModifiersToResourceSchema(fields, &s)
- if err != nil {
- resp.Diagnostics.AddError("error adding plan modifiers", err.Error())
- return
- }
- resp.Schema = s
-}
-
-func (r *instanceResource) IdentitySchema(
- _ context.Context,
- _ resource.IdentitySchemaRequest,
- resp *resource.IdentitySchemaResponse,
-) {
- resp.IdentitySchema = identityschema.Schema{
- Attributes: map[string]identityschema.Attribute{
- "project_id": identityschema.StringAttribute{
- RequiredForImport: true, // must be set during import by the practitioner
- },
- "region": identityschema.StringAttribute{
- RequiredForImport: true, // can be defaulted by the provider configuration
- },
- "instance_id": identityschema.StringAttribute{
- RequiredForImport: true, // can be defaulted by the provider configuration
- },
- },
- }
-}
-
-// Configure adds the provider configured client to the resource.
-func (r *instanceResource) Configure(
- ctx context.Context,
- req resource.ConfigureRequest,
- resp *resource.ConfigureResponse,
-) {
- var ok bool
- r.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics)
- if !ok {
- return
- }
-
- apiClientConfigOptions := []config.ConfigurationOption{
- config.WithCustomAuth(r.providerData.RoundTripper),
- utils.UserAgentConfigOption(r.providerData.Version),
- }
- if r.providerData.SQLServerFlexCustomEndpoint != "" {
- apiClientConfigOptions = append(
- apiClientConfigOptions,
- config.WithEndpoint(r.providerData.SQLServerFlexCustomEndpoint),
- )
- } else {
- apiClientConfigOptions = append(apiClientConfigOptions, config.WithRegion(r.providerData.GetRegion()))
- }
- apiClient, err := sqlserverflexbeta.NewAPIClient(apiClientConfigOptions...)
- if err != nil {
- resp.Diagnostics.AddError(
- "Error configuring API client",
- fmt.Sprintf(
- "Configuring client: %v. This is an error related to the provider configuration, not to the resource configuration",
- err,
- ),
- )
- return
- }
- r.client = apiClient
- tflog.Info(ctx, "sqlserverflexbeta.Instance client configured")
-}
-
-// ModifyPlan implements resource.ResourceWithModifyPlan.
-// Use the modifier to set the effective region in the current plan.
-func (r *instanceResource) ModifyPlan(
- ctx context.Context,
- req resource.ModifyPlanRequest,
- resp *resource.ModifyPlanResponse,
-) { // nolint:gocritic // function signature required by Terraform
- // skip initial empty configuration to avoid follow-up errors
- if req.Config.Raw.IsNull() {
- return
- }
- var configModel resourceModel
- resp.Diagnostics.Append(req.Config.Get(ctx, &configModel)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- if req.Plan.Raw.IsNull() {
- return
- }
- var planModel resourceModel
- resp.Diagnostics.Append(req.Plan.Get(ctx, &planModel)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- utils.AdaptRegion(ctx, configModel.Region, &planModel.Region, r.providerData.GetRegion(), resp)
- if resp.Diagnostics.HasError() {
- return
- }
-
- resp.Diagnostics.Append(resp.Plan.Set(ctx, planModel)...)
- if resp.Diagnostics.HasError() {
- return
- }
-}
-
-func (r *instanceResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) {
- var data resourceModel
- crateErr := "[SQL Server Flex BETA - Create] error"
-
- // Read Terraform plan data into the model
- resp.Diagnostics.Append(req.Plan.Get(ctx, &data)...)
-
- if resp.Diagnostics.HasError() {
- return
- }
-
- ctx = core.InitProviderContext(ctx)
-
- projectId := data.ProjectId.ValueString()
- region := data.Region.ValueString()
- ctx = tflog.SetField(ctx, "project_id", projectId)
- ctx = tflog.SetField(ctx, "region", region)
-
- // Generate API request body from model
- payload, err := toCreatePayload(ctx, &data)
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- crateErr,
- fmt.Sprintf("Creating API payload: %v", err),
- )
- return
- }
-
- // Create new Instance
- createResp, err := r.client.CreateInstanceRequest(
- ctx,
- projectId,
- region,
- ).CreateInstanceRequestPayload(*payload).Execute()
- if err != nil {
- core.LogAndAddError(ctx, &resp.Diagnostics, crateErr, fmt.Sprintf("Calling API: %v", err))
- return
- }
-
- ctx = core.LogResponse(ctx)
-
- InstanceId := *createResp.Id
-
- // Example data value setting
- data.InstanceId = types.StringValue("id-from-response")
-
- identity := InstanceResourceIdentityModel{
- ProjectID: types.StringValue(projectId),
- Region: types.StringValue(region),
- InstanceID: types.StringValue(InstanceId),
- }
- resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- waitResp, err := wait.CreateInstanceWaitHandler(
- ctx,
- r.client,
- projectId,
- InstanceId,
- region,
- ).SetSleepBeforeWait(
- 10 * time.Second,
- ).SetTimeout(
- 90 * time.Minute,
- ).WaitWithContext(ctx)
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- crateErr,
- fmt.Sprintf("Instance creation waiting: %v", err),
- )
- return
- }
-
- if waitResp.Id == nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- crateErr,
- "Instance creation waiting: returned id is nil",
- )
- return
- }
-
- // Map response body to schema
- err = mapResponseToModel(ctx, waitResp, &data, resp.Diagnostics)
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- crateErr,
- fmt.Sprintf("processing API payload: %v", err),
- )
- return
- }
-
- // Save data into Terraform state
- resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
-
- tflog.Info(ctx, "sqlserverflexbeta.Instance created")
-}
-
-func (r *instanceResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) {
- var data resourceModel
-
- // Read Terraform prior state data into the model
- resp.Diagnostics.Append(req.State.Get(ctx, &data)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- ctx = core.InitProviderContext(ctx)
-
- projectId := data.ProjectId.ValueString()
- region := data.Region.ValueString()
- ctx = tflog.SetField(ctx, "project_id", projectId)
- ctx = tflog.SetField(ctx, "region", region)
-
- instanceId := data.InstanceId.ValueString()
- ctx = tflog.SetField(ctx, "instance_id", instanceId)
-
- instanceResp, err := r.client.GetInstanceRequest(ctx, projectId, region, instanceId).Execute()
- if err != nil {
- oapiErr, ok := err.(*oapierror.GenericOpenAPIError) //nolint:errorlint //complaining that error.As should be used to catch wrapped errors, but this error should not be wrapped
- if ok && oapiErr.StatusCode == http.StatusNotFound {
- resp.State.RemoveResource(ctx)
- return
- }
- core.LogAndAddError(ctx, &resp.Diagnostics, "Error reading instance", err.Error())
- return
- }
-
- ctx = core.LogResponse(ctx)
-
- // Map response body to schema
- err = mapResponseToModel(ctx, instanceResp, &data, resp.Diagnostics)
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "Error reading instance",
- fmt.Sprintf("Processing API payload: %v", err),
- )
- return
- }
-
- // Save identity into Terraform state
- identity := InstanceResourceIdentityModel{
- ProjectID: types.StringValue(projectId),
- Region: types.StringValue(region),
- InstanceID: types.StringValue(instanceId),
- }
- resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- // Save updated data into Terraform state
- resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- tflog.Info(ctx, "sqlserverflexbeta.Instance read")
-}
-
-func (r *instanceResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) {
- var data resourceModel
- updateInstanceError := "Error updating instance"
-
- resp.Diagnostics.Append(req.Plan.Get(ctx, &data)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- ctx = core.InitProviderContext(ctx)
-
- projectId := data.ProjectId.ValueString()
- region := data.Region.ValueString()
- ctx = tflog.SetField(ctx, "project_id", projectId)
- ctx = tflog.SetField(ctx, "region", region)
-
- instanceId := data.InstanceId.ValueString()
- ctx = tflog.SetField(ctx, "instance_id", instanceId)
-
- // Generate API request body from model
- payload, err := toUpdatePayload(ctx, &data, resp)
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- updateInstanceError,
- fmt.Sprintf("Creating API payload: %v", err),
- )
- return
- }
- // Update existing instance
- err = r.client.UpdateInstanceRequest(
- ctx,
- projectId,
- region,
- instanceId,
- ).UpdateInstanceRequestPayload(*payload).Execute()
- if err != nil {
- core.LogAndAddError(ctx, &resp.Diagnostics, updateInstanceError, err.Error())
- return
- }
-
- ctx = core.LogResponse(ctx)
-
- waitResp, err := wait.
- UpdateInstanceWaitHandler(ctx, r.client, projectId, instanceId, region).
- SetSleepBeforeWait(15 * time.Second).
- SetTimeout(45 * time.Minute).
- WaitWithContext(ctx)
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- updateInstanceError,
- fmt.Sprintf("Instance update waiting: %v", err),
- )
- return
- }
-
- // Map response body to schema
- err = mapResponseToModel(ctx, waitResp, &data, resp.Diagnostics)
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- updateInstanceError,
- fmt.Sprintf("Processing API payload: %v", err),
- )
- return
- }
-
- identity := InstanceResourceIdentityModel{
- ProjectID: types.StringValue(projectId),
- Region: types.StringValue(region),
- InstanceID: types.StringValue(instanceId),
- }
- resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- // Save updated data into Terraform state
- resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- tflog.Info(ctx, "sqlserverflexbeta.Instance updated")
-}
-
-func (r *instanceResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) {
- var data resourceModel
-
- // Read Terraform prior state data into the model
- resp.Diagnostics.Append(req.State.Get(ctx, &data)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- // Read identity data
- var identityData InstanceResourceIdentityModel
- resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- ctx = core.InitProviderContext(ctx)
-
- projectId := identityData.ProjectID.ValueString()
- region := identityData.Region.ValueString()
- ctx = tflog.SetField(ctx, "project_id", projectId)
- ctx = tflog.SetField(ctx, "region", region)
-
- instanceId := identityData.InstanceID.ValueString()
- ctx = tflog.SetField(ctx, "instance_id", instanceId)
-
- // Delete existing instance
- err := r.client.DeleteInstanceRequest(ctx, projectId, region, instanceId).Execute()
- if err != nil {
- core.LogAndAddError(ctx, &resp.Diagnostics, "Error deleting instance", fmt.Sprintf("Calling API: %v", err))
- return
- }
-
- ctx = core.LogResponse(ctx)
-
- delResp, err := wait.DeleteInstanceWaitHandler(ctx, r.client, projectId, instanceId, region).WaitWithContext(ctx)
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "Error deleting instance",
- fmt.Sprintf("Instance deletion waiting: %v", err),
- )
- return
- }
-
- if delResp != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "Error deleting instance",
- "wait handler returned non nil result",
- )
- return
- }
-
- resp.State.RemoveResource(ctx)
-
- tflog.Info(ctx, "sqlserverflexbeta.Instance deleted")
-}
-
-// ImportState imports a resource into the Terraform state on success.
-// The expected format of the resource import identifier is: project_id,zone_id,record_set_id
-func (r *instanceResource) ImportState(
- ctx context.Context,
- req resource.ImportStateRequest,
- resp *resource.ImportStateResponse,
-) {
- ctx = core.InitProviderContext(ctx)
-
- if req.ID != "" {
- idParts := strings.Split(req.ID, core.Separator)
-
- if len(idParts) != 3 || idParts[0] == "" || idParts[1] == "" || idParts[2] == "" {
- core.LogAndAddError(
- ctx, &resp.Diagnostics,
- "Error importing instance",
- fmt.Sprintf(
- "Expected import identifier with format [project_id],[region],[instance_id] Got: %q",
- req.ID,
- ),
- )
- return
- }
-
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), idParts[0])...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), idParts[1])...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), idParts[2])...)
- return
- }
-
- // If no ID is provided, attempt to read identity attributes from the import configuration
- var identityData InstanceResourceIdentityModel
- resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- projectId := identityData.ProjectID.ValueString()
- region := identityData.Region.ValueString()
- instanceId := identityData.InstanceID.ValueString()
-
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), projectId)...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), region)...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), instanceId)...)
-
- tflog.Info(ctx, "Sqlserverflexbeta instance state imported")
-}
diff --git a/stackit/internal/services/sqlserverflexbeta/sqlserverflex_acc_test.go b/stackit/internal/services/sqlserverflexbeta/sqlserverflex_acc_test.go
deleted file mode 100644
index 887c5edd..00000000
--- a/stackit/internal/services/sqlserverflexbeta/sqlserverflex_acc_test.go
+++ /dev/null
@@ -1,494 +0,0 @@
-package sqlserverflexbeta_test
-
-import (
- "context"
- _ "embed"
- "fmt"
- "log"
- "os"
- "strconv"
- "strings"
- "testing"
-
- "github.com/hashicorp/terraform-plugin-testing/helper/acctest"
- "github.com/hashicorp/terraform-plugin-testing/helper/resource"
- "github.com/stackitcloud/stackit-sdk-go/core/config"
-
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/internal/testutils"
- sqlserverflexbetaResGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta"
- sqlserverflexbeta "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/instance"
-
- // The fwresource import alias is so there is no collision
- // with the more typical acceptance testing import:
- // "github.com/hashicorp/terraform-plugin-testing/helper/resource"
- fwresource "github.com/hashicorp/terraform-plugin-framework/resource"
-)
-
-const providerPrefix = "stackitprivatepreview_sqlserverflexbeta"
-
-var testInstances []string
-
-func init() {
- sweeperName := fmt.Sprintf("%s_%s", providerPrefix, "sweeper")
-
- resource.AddTestSweepers(sweeperName, &resource.Sweeper{
- Name: sweeperName,
- F: func(region string) error {
- ctx := context.Background()
- apiClientConfigOptions := []config.ConfigurationOption{}
- apiClient, err := sqlserverflexbetaResGen.NewAPIClient(apiClientConfigOptions...)
- if err != nil {
- log.Fatalln(err)
- }
-
- instances, err := apiClient.ListInstancesRequest(ctx, testutils.ProjectId, region).
- Size(100).
- Execute()
- if err != nil {
- log.Fatalln(err)
- }
-
- for _, inst := range instances.GetInstances() {
- if strings.HasPrefix(inst.GetName(), "tf-acc-") {
- for _, item := range testInstances {
- if inst.GetName() == item {
- delErr := apiClient.DeleteInstanceRequestExecute(ctx, testutils.ProjectId, region, inst.GetId())
- if delErr != nil {
- // TODO: maybe just warn?
- log.Fatalln(delErr)
- }
- }
- }
- }
- }
- return nil
- },
- })
-}
-
-func TestInstanceResourceSchema(t *testing.T) {
- t.Parallel()
-
- ctx := context.Background()
- schemaRequest := fwresource.SchemaRequest{}
- schemaResponse := &fwresource.SchemaResponse{}
-
- // Instantiate the resource.Resource and call its Schema method
- sqlserverflexbeta.NewInstanceResource().Schema(ctx, schemaRequest, schemaResponse)
-
- if schemaResponse.Diagnostics.HasError() {
- t.Fatalf("Schema method diagnostics: %+v", schemaResponse.Diagnostics)
- }
-
- // Validate the schema
- diagnostics := schemaResponse.Schema.ValidateImplementation(ctx)
-
- if diagnostics.HasError() {
- t.Fatalf("Schema validation diagnostics: %+v", diagnostics)
- }
-}
-
-func TestMain(m *testing.M) {
- testutils.Setup()
- code := m.Run()
- // shutdown()
- os.Exit(code)
-}
-
-func testAccPreCheck(t *testing.T) {
- if _, ok := os.LookupEnv("TF_ACC_PROJECT_ID"); !ok {
- t.Fatalf("could not find env var TF_ACC_PROJECT_ID")
- }
-}
-
-type resData struct {
- ServiceAccountFilePath string
- ProjectId string
- Region string
- Name string
- TfName string
- FlavorId string
- BackupSchedule string
- UseEncryption bool
- KekKeyId string
- KekKeyRingId string
- KekKeyVersion uint8
- KekServiceAccount string
- PerformanceClass string
- Size uint32
- AclString string
- AccessScope string
- RetentionDays uint32
- Version string
- Users []User
- Databases []Database
-}
-
-type User struct {
- Name string
- ProjectId string
- Roles []string
-}
-
-type Database struct {
- Name string
- ProjectId string
- Owner string
- Collation string
- Compatibility string
-}
-
-func resName(res, name string) string {
- return fmt.Sprintf("%s_%s.%s", providerPrefix, res, name)
-}
-
-func getExample() resData {
- name := acctest.RandomWithPrefix("tf-acc")
- return resData{
- Region: os.Getenv("TF_ACC_REGION"),
- ServiceAccountFilePath: os.Getenv("TF_ACC_SERVICE_ACCOUNT_FILE"),
- ProjectId: os.Getenv("TF_ACC_PROJECT_ID"),
- Name: name,
- TfName: name,
- FlavorId: "4.16-Single",
- BackupSchedule: "0 0 * * *",
- UseEncryption: false,
- RetentionDays: 33,
- PerformanceClass: "premium-perf2-stackit",
- Size: 10,
- AclString: "0.0.0.0/0",
- AccessScope: "PUBLIC",
- Version: "2022",
- }
-}
-
-func TestAccInstance(t *testing.T) {
- exData := getExample()
-
- updNameData := exData
- updNameData.Name = "name-updated"
-
- updSizeData := exData
- updSizeData.Size = 25
-
- resource.ParallelTest(t, resource.TestCase{
- PreCheck: func() {
- testAccPreCheck(t)
- t.Logf(" ... working on instance %s", exData.TfName)
- testInstances = append(testInstances, exData.TfName)
- },
- ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories,
- Steps: []resource.TestStep{
- // Create and verify
- {
- Config: testutils.StringFromTemplateMust(
- "testdata/instance_template.gompl",
- exData,
- ),
- Check: resource.ComposeAggregateTestCheckFunc(
- resource.TestCheckResourceAttr(resName("instance", exData.TfName), "name", exData.Name),
- resource.TestCheckResourceAttrSet(resName("instance", exData.TfName), "id"),
- // TODO: check all fields
- ),
- },
- // Update name and verify
- {
- Config: testutils.StringFromTemplateMust(
- "testdata/instance_template.gompl",
- updNameData,
- ),
- Check: resource.ComposeTestCheckFunc(
- resource.TestCheckResourceAttr(resName("instance", exData.TfName), "name", updNameData.Name),
- ),
- },
- // Update size and verify
- {
- Config: testutils.StringFromTemplateMust(
- "testdata/instance_template.gompl",
- updSizeData,
- ),
- Check: resource.ComposeTestCheckFunc(
- resource.TestCheckResourceAttr(
- testutils.ResStr(providerPrefix, "instance", exData.TfName),
- "storage.size",
- strconv.Itoa(int(updSizeData.Size)),
- ),
- ),
- },
- {
- RefreshState: true,
- },
- //// Import test
- //{
- // ResourceName: resName("instance", exData.TfName),
- // ImportState: true,
- // ImportStateVerify: true,
- // },
- },
- })
-}
-
-func TestAccInstanceReApply(t *testing.T) {
- exData := getExample()
- resource.ParallelTest(t, resource.TestCase{
- PreCheck: func() {
- testAccPreCheck(t)
- t.Logf(" ... working on instance %s", exData.TfName)
- testInstances = append(testInstances, exData.TfName)
- },
- ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories,
- Steps: []resource.TestStep{
- // Create and verify
- {
- Config: testutils.StringFromTemplateMust(
- "testdata/instance_template.gompl",
- exData,
- ),
- Check: resource.ComposeAggregateTestCheckFunc(
- resource.TestCheckResourceAttr(resName("instance", exData.TfName), "name", exData.Name),
- resource.TestCheckResourceAttrSet(resName("instance", exData.TfName), "id"),
- // TODO: check all fields
- ),
- },
- // Create and verify
- {
- Config: testutils.StringFromTemplateMust(
- "testdata/instance_template.gompl",
- exData,
- ),
- Check: resource.ComposeAggregateTestCheckFunc(
- resource.TestCheckResourceAttr(resName("instance", exData.TfName), "name", exData.Name),
- resource.TestCheckResourceAttrSet(resName("instance", exData.TfName), "id"),
- // TODO: check all fields
- ),
- },
- {
- RefreshState: true,
- },
- // Create and verify
- {
- Config: testutils.StringFromTemplateMust(
- "testdata/instance_template.gompl",
- exData,
- ),
- Check: resource.ComposeAggregateTestCheckFunc(
- resource.TestCheckResourceAttr(resName("instance", exData.TfName), "name", exData.Name),
- resource.TestCheckResourceAttrSet(resName("instance", exData.TfName), "id"),
- // TODO: check all fields
- ),
- },
- // Import test
- {
- ResourceName: resName("instance", exData.TfName),
- ImportStateKind: resource.ImportBlockWithResourceIdentity,
- ImportState: true,
- // ImportStateVerify is not supported with plannable import blocks
- // ImportStateVerify: true,
- },
- },
- })
-}
-
-func TestAccInstanceNoEncryption(t *testing.T) {
- data := getExample()
-
- dbName := "testDb"
- userName := "testUser"
- data.Users = []User{
- {
- Name: userName,
- ProjectId: os.Getenv("TF_ACC_PROJECT_ID"),
- Roles: []string{
- "##STACKIT_DatabaseManager##",
- "##STACKIT_LoginManager##",
- "##STACKIT_ProcessManager##",
- "##STACKIT_SQLAgentManager##",
- "##STACKIT_SQLAgentUser##",
- "##STACKIT_ServerManager##",
- },
- },
- }
- data.Databases = []Database{
- {
- Name: dbName,
- ProjectId: os.Getenv("TF_ACC_PROJECT_ID"),
- Owner: userName,
- },
- }
-
- resource.ParallelTest(t, resource.TestCase{
- PreCheck: func() {
- testAccPreCheck(t)
- t.Logf(" ... working on instance %s", data.TfName)
- testInstances = append(testInstances, data.TfName)
- },
- ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories,
- Steps: []resource.TestStep{
- // Create and verify
- {
- Config: testutils.StringFromTemplateMust(
- "testdata/instance_template.gompl",
- data,
- ),
- Check: resource.ComposeAggregateTestCheckFunc(
- // check instance values are set
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "id"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "backup_schedule"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "edition"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "flavor_id"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "instance_id"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "is_deletable"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "name"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "replicas"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "retention_days"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "status"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "version"),
-
- resource.TestCheckNoResourceAttr(resName("instance", data.TfName), "encryption"),
-
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption"),
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_id"),
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_version"),
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_ring_id"),
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.service_account"),
-
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.access_scope"),
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.acl"),
-
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.instance_address"),
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.router_address"),
-
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "storage.class"),
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "storage.size"),
-
- // check instance values are correct
- resource.TestCheckResourceAttr(resName("instance", data.TfName), "name", data.Name),
-
- // check user values are set
- resource.TestCheckResourceAttrSet(resName("user", userName), "id"),
- resource.TestCheckResourceAttrSet(resName("user", userName), "username"),
- // resource.TestCheckResourceAttrSet(resName("user", userName), "roles"),
-
- // func(s *terraform.State) error {
- // return nil
- // },
-
- // check user values are correct
- resource.TestCheckResourceAttr(resName("user", userName), "username", userName),
- resource.TestCheckResourceAttr(resName("user", userName), "roles.#", strconv.Itoa(len(data.Users[0].Roles))),
-
- // check database values are set
- resource.TestCheckResourceAttrSet(resName("database", dbName), "id"),
- resource.TestCheckResourceAttrSet(resName("database", dbName), "name"),
- resource.TestCheckResourceAttrSet(resName("database", dbName), "owner"),
- resource.TestCheckResourceAttrSet(resName("database", dbName), "compatibility"),
- resource.TestCheckResourceAttrSet(resName("database", dbName), "collation"),
-
- // check database values are correct
- resource.TestCheckResourceAttr(resName("database", dbName), "name", dbName),
- resource.TestCheckResourceAttr(resName("database", dbName), "owner", userName),
- ),
- },
- },
- })
-}
-
-func TestAccInstanceEncryption(t *testing.T) {
- data := getExample()
-
- dbName := "testDb"
- userName := "testUser"
- data.Users = []User{
- {
- Name: userName,
- ProjectId: os.Getenv("TF_ACC_PROJECT_ID"),
- Roles: []string{"##STACKIT_DatabaseManager##", "##STACKIT_LoginManager##"},
- },
- }
- data.Databases = []Database{
- {
- Name: dbName,
- ProjectId: os.Getenv("TF_ACC_PROJECT_ID"),
- Owner: userName,
- },
- }
-
- data.UseEncryption = true
- data.KekKeyId = "fe039bcf-8d7b-431a-801d-9e81371a6b7b"
- data.KekKeyRingId = "6a2d95ab-3c4c-4963-a2bb-08d17a320e27"
- data.KekKeyVersion = 1
- data.KekServiceAccount = "henselinm-u2v3ex1@sa.stackit.cloud"
-
- resource.ParallelTest(t, resource.TestCase{
- PreCheck: func() {
- testAccPreCheck(t)
- t.Logf(" ... working on instance %s", data.TfName)
- testInstances = append(testInstances, data.TfName)
- },
- ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories,
- Steps: []resource.TestStep{
- // Create and verify
- {
- Config: testutils.StringFromTemplateMust(
- "testdata/instance_template.gompl",
- data,
- ),
- Check: resource.ComposeAggregateTestCheckFunc(
- // check instance values are set
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "id"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "backup_schedule"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "edition"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "flavor_id"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "instance_id"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "is_deletable"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "name"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "replicas"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "retention_days"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "status"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "version"),
-
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption"),
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_id"),
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_version"),
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_ring_id"),
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.service_account"),
-
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.access_scope"),
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.acl"),
-
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.instance_address"),
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.router_address"),
-
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "storage.class"),
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "storage.size"),
-
- // check instance values are correct
- resource.TestCheckResourceAttr(resName("instance", data.TfName), "name", data.Name),
-
- // check user values are set
- resource.TestCheckResourceAttrSet(resName("user", userName), "id"),
- resource.TestCheckResourceAttrSet(resName("user", userName), "username"),
-
- // func(s *terraform.State) error {
- // return nil
- // },
-
- // check user values are correct
- resource.TestCheckResourceAttr(resName("user", userName), "username", userName),
- resource.TestCheckResourceAttr(resName("user", userName), "roles.#", "2"),
-
- // check database values are set
- resource.TestCheckResourceAttrSet(resName("database", dbName), "id"),
- resource.TestCheckResourceAttrSet(resName("database", dbName), "name"),
- resource.TestCheckResourceAttrSet(resName("database", dbName), "owner"),
- resource.TestCheckResourceAttrSet(resName("database", dbName), "compatibility"),
- resource.TestCheckResourceAttrSet(resName("database", dbName), "collation"),
-
- // check database values are correct
- resource.TestCheckResourceAttr(resName("database", dbName), "name", dbName),
- resource.TestCheckResourceAttr(resName("database", dbName), "owner", userName),
- ),
- },
- },
- })
-}
diff --git a/stackit/internal/services/sqlserverflexbeta/testdata/instance_template.gompl b/stackit/internal/services/sqlserverflexbeta/testdata/instance_template.gompl
deleted file mode 100644
index e71f3fa0..00000000
--- a/stackit/internal/services/sqlserverflexbeta/testdata/instance_template.gompl
+++ /dev/null
@@ -1,60 +0,0 @@
-provider "stackitprivatepreview" {
- default_region = "{{ .Region }}"
- service_account_key_path = "{{ .ServiceAccountFilePath }}"
-}
-
-resource "stackitprivatepreview_sqlserverflexbeta_instance" "{{ .TfName }}" {
- project_id = "{{ .ProjectId }}"
- name = "{{ .Name }}"
- backup_schedule = "{{ .BackupSchedule }}"
- retention_days = {{ .RetentionDays }}
- flavor_id = "{{ .FlavorId }}"
- storage = {
- class = "{{ .PerformanceClass }}"
- size = {{ .Size }}
- }
-{{ if .UseEncryption }}
- encryption = {
- kek_key_id = "{{ .KekKeyId }}"
- kek_key_ring_id = "{{ .KekKeyRingId }}"
- kek_key_version = {{ .KekKeyVersion }}
- service_account = "{{ .KekServiceAccount }}"
- }
-{{ end }}
- network = {
- acl = ["{{ .AclString }}"]
- access_scope = "{{ .AccessScope }}"
- }
- version = "{{ .Version }}"
-}
-
-{{ if .Users }}
-{{ $tfName := .TfName }}
-{{ range $user := .Users }}
-resource "stackitprivatepreview_sqlserverflexbeta_user" "{{ $user.Name }}" {
- project_id = "{{ $user.ProjectId }}"
- instance_id = stackitprivatepreview_sqlserverflexbeta_instance.{{ $tfName }}.instance_id
- username = "{{ $user.Name }}"
- roles = [{{ range $i, $v := $user.Roles }}{{if $i}},{{end}}"{{$v}}"{{end}}]
-}
-{{ end }}
-{{ end }}
-
-{{ if .Databases }}
-{{ $tfName := .TfName }}
-{{ range $db := .Databases }}
-resource "stackitprivatepreview_sqlserverflexbeta_database" "{{ $db.Name }}" {
- depends_on = [stackitprivatepreview_sqlserverflexbeta_user.{{ $db.Owner }}]
- project_id = "{{ $db.ProjectId }}"
- instance_id = stackitprivatepreview_sqlserverflexbeta_instance.{{ $tfName }}.instance_id
- name = "{{ $db.Name }}"
- owner = "{{ $db.Owner }}"
-{{ if $db.Collation }}
- collation = "{{ $db.Collation }}"
-{{ end }}
-{{ if $db.Compatibility }}
- compatibility = "{{ $db.Compatibility }}"
-{{ end }}
-}
-{{ end }}
-{{ end }}
diff --git a/stackit/internal/services/sqlserverflexbeta/user/datasource.go b/stackit/internal/services/sqlserverflexbeta/user/datasource.go
deleted file mode 100644
index d726bc2b..00000000
--- a/stackit/internal/services/sqlserverflexbeta/user/datasource.go
+++ /dev/null
@@ -1,139 +0,0 @@
-package sqlserverflexbeta
-
-import (
- "context"
- "fmt"
- "net/http"
-
- "github.com/hashicorp/terraform-plugin-framework/datasource"
- "github.com/hashicorp/terraform-plugin-framework/types"
- "github.com/hashicorp/terraform-plugin-log/tflog"
-
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
- sqlserverflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/utils"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
-
- sqlserverflexbetaPkg "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta"
- sqlserverflexbetaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/user/datasources_gen"
-)
-
-var _ datasource.DataSource = (*userDataSource)(nil)
-
-func NewUserDataSource() datasource.DataSource {
- return &userDataSource{}
-}
-
-type dataSourceModel struct {
- DefaultDatabase types.String `tfsdk:"default_database"`
- Host types.String `tfsdk:"host"`
- Id types.String `tfsdk:"id"`
- InstanceId types.String `tfsdk:"instance_id"`
- Port types.Int64 `tfsdk:"port"`
- ProjectId types.String `tfsdk:"project_id"`
- Region types.String `tfsdk:"region"`
- Roles types.List `tfsdk:"roles"`
- Status types.String `tfsdk:"status"`
- UserId types.Int64 `tfsdk:"user_id"`
- Username types.String `tfsdk:"username"`
-}
-
-type userDataSource struct {
- client *sqlserverflexbetaPkg.APIClient
- providerData core.ProviderData
-}
-
-func (d *userDataSource) Metadata(
- _ context.Context,
- req datasource.MetadataRequest,
- resp *datasource.MetadataResponse,
-) {
- resp.TypeName = req.ProviderTypeName + "_sqlserverflexbeta_user"
-}
-
-func (d *userDataSource) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) {
- resp.Schema = sqlserverflexbetaGen.UserDataSourceSchema(ctx)
-}
-
-// Configure adds the provider configured client to the data source.
-func (d *userDataSource) Configure(
- ctx context.Context,
- req datasource.ConfigureRequest,
- resp *datasource.ConfigureResponse,
-) {
- var ok bool
- d.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics)
- if !ok {
- return
- }
-
- apiClient := sqlserverflexUtils.ConfigureClient(ctx, &d.providerData, &resp.Diagnostics)
- if resp.Diagnostics.HasError() {
- return
- }
- d.client = apiClient
- tflog.Info(ctx, "SQL SERVER Flex alpha database client configured")
-}
-
-func (d *userDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
- var model dataSourceModel
- diags := req.Config.Get(ctx, &model)
- resp.Diagnostics.Append(diags...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- ctx = core.InitProviderContext(ctx)
-
- projectId := model.ProjectId.ValueString()
- instanceId := model.InstanceId.ValueString()
- userId := model.UserId.ValueInt64()
- region := d.providerData.GetRegionWithOverride(model.Region)
- ctx = tflog.SetField(ctx, "project_id", projectId)
- ctx = tflog.SetField(ctx, "instance_id", instanceId)
- ctx = tflog.SetField(ctx, "user_id", userId)
- ctx = tflog.SetField(ctx, "region", region)
-
- recordSetResp, err := d.client.GetUserRequest(ctx, projectId, region, instanceId, userId).Execute()
- if err != nil {
- utils.LogError(
- ctx,
- &resp.Diagnostics,
- err,
- "Reading user",
- fmt.Sprintf(
- "User with ID %q or instance with ID %q does not exist in project %q.",
- userId,
- instanceId,
- projectId,
- ),
- map[int]string{
- http.StatusForbidden: fmt.Sprintf("Project with ID %q not found or forbidden access", projectId),
- },
- )
- resp.State.RemoveResource(ctx)
- return
- }
-
- ctx = core.LogResponse(ctx)
-
- // Map response body to schema and populate Computed attribute values
- err = mapDataSourceFields(recordSetResp, &model, region)
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "Error reading user",
- fmt.Sprintf("Processing API payload: %v", err),
- )
- return
- }
-
- // Set refreshed state
- diags = resp.State.Set(ctx, model)
- resp.Diagnostics.Append(diags...)
- if resp.Diagnostics.HasError() {
- return
- }
- tflog.Info(ctx, "SQLServer Flex beta instance read")
-}
diff --git a/stackit/internal/services/sqlserverflexbeta/user/mapper.go b/stackit/internal/services/sqlserverflexbeta/user/mapper.go
deleted file mode 100644
index ca916d28..00000000
--- a/stackit/internal/services/sqlserverflexbeta/user/mapper.go
+++ /dev/null
@@ -1,198 +0,0 @@
-package sqlserverflexbeta
-
-import (
- "fmt"
- "slices"
- "strconv"
-
- "github.com/hashicorp/terraform-plugin-framework/attr"
- "github.com/hashicorp/terraform-plugin-framework/types"
-
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
-)
-
-// mapDataSourceFields maps the API response to a dataSourceModel.
-func mapDataSourceFields(userResp *sqlserverflexbeta.GetUserResponse, model *dataSourceModel, region string) error {
- if userResp == nil {
- return fmt.Errorf("response is nil")
- }
- if model == nil {
- return fmt.Errorf("model input is nil")
- }
- user := userResp
-
- // Handle user ID
- var userId int64
- if model.UserId.ValueInt64() != 0 {
- userId = model.UserId.ValueInt64()
- } else if user.Id != nil {
- userId = *user.Id
- } else {
- return fmt.Errorf("user id not present")
- }
-
- // Set main attributes
- model.Id = utils.BuildInternalTerraformId(
- model.ProjectId.ValueString(), region, model.InstanceId.ValueString(), strconv.FormatInt(userId, 10),
- )
- model.UserId = types.Int64Value(userId)
- model.Username = types.StringPointerValue(user.Username)
-
- // Map roles
- if user.Roles == nil {
- model.Roles = types.List(types.SetNull(types.StringType))
- } else {
- var roles []attr.Value
- resRoles := *user.Roles
- slices.Sort(resRoles)
- for _, role := range resRoles {
- roles = append(roles, types.StringValue(string(role)))
- }
- rolesSet, diags := types.SetValue(types.StringType, roles)
- if diags.HasError() {
- return fmt.Errorf("failed to map roles: %w", core.DiagsToError(diags))
- }
- model.Roles = types.List(rolesSet)
- }
-
- // Set remaining attributes
- model.Host = types.StringPointerValue(user.Host)
- model.Port = types.Int64PointerValue(user.Port)
- model.Region = types.StringValue(region)
- model.Status = types.StringPointerValue(user.Status)
- model.DefaultDatabase = types.StringPointerValue(user.DefaultDatabase)
-
- return nil
-}
-
-// mapFields maps the API response to a resourceModel.
-func mapFields(userResp *sqlserverflexbeta.GetUserResponse, model *resourceModel, region string) error {
- if userResp == nil {
- return fmt.Errorf("response is nil")
- }
- if model == nil {
- return fmt.Errorf("model input is nil")
- }
- user := userResp
-
- // Handle user ID
- var userId int64
- if model.UserId.ValueInt64() != 0 {
- userId = model.UserId.ValueInt64()
- } else if user.Id != nil {
- userId = *user.Id
- } else {
- return fmt.Errorf("user id not present")
- }
-
- // Set main attributes
- model.Id = types.Int64Value(userId)
- model.UserId = types.Int64Value(userId)
- model.Username = types.StringPointerValue(user.Username)
-
- // Map roles
- if userResp.Roles != nil {
- resRoles := *userResp.Roles
- slices.Sort(resRoles)
-
- var roles []attr.Value
- for _, role := range resRoles {
- roles = append(roles, types.StringValue(string(role)))
- }
-
- rolesSet, diags := types.ListValue(types.StringType, roles)
- if diags.HasError() {
- return fmt.Errorf("failed to map roles: %w", core.DiagsToError(diags))
- }
- model.Roles = rolesSet
- }
-
- // Ensure roles is not null
- if model.Roles.IsNull() || model.Roles.IsUnknown() {
- model.Roles = types.List(types.SetNull(types.StringType))
- }
-
- // Set connection details
- model.Host = types.StringPointerValue(user.Host)
- model.Port = types.Int64PointerValue(user.Port)
- model.Region = types.StringValue(region)
- return nil
-}
-
-// mapFieldsCreate maps the API response from creating a user to a resourceModel.
-func mapFieldsCreate(userResp *sqlserverflexbeta.CreateUserResponse, model *resourceModel, region string) error {
- if userResp == nil {
- return fmt.Errorf("response is nil")
- }
- if model == nil {
- return fmt.Errorf("model input is nil")
- }
- user := userResp
-
- if user.Id == nil {
- return fmt.Errorf("user id not present")
- }
- userId := *user.Id
- model.Id = types.Int64Value(userId)
- model.UserId = types.Int64Value(userId)
- model.Username = types.StringPointerValue(user.Username)
-
- if user.Password == nil {
- return fmt.Errorf("user password not present")
- }
- model.Password = types.StringValue(*user.Password)
-
- if user.Roles != nil {
- resRoles := *user.Roles
- slices.Sort(resRoles)
-
- var roles []attr.Value
- for _, role := range resRoles {
- roles = append(roles, types.StringValue(string(role)))
- }
- rolesList, diags := types.ListValue(types.StringType, roles)
- if diags.HasError() {
- return fmt.Errorf("failed to map roles: %w", core.DiagsToError(diags))
- }
- model.Roles = rolesList
- }
-
- if model.Roles.IsNull() || model.Roles.IsUnknown() {
- model.Roles = types.List(types.SetNull(types.StringType))
- }
-
- model.Password = types.StringPointerValue(user.Password)
- model.Uri = types.StringPointerValue(user.Uri)
-
- model.Host = types.StringPointerValue(user.Host)
- model.Port = types.Int64PointerValue(user.Port)
- model.Region = types.StringValue(region)
- model.Status = types.StringPointerValue(user.Status)
- model.DefaultDatabase = types.StringPointerValue(user.DefaultDatabase)
-
- return nil
-}
-
-// toCreatePayload converts a resourceModel to an API CreateUserRequestPayload.
-func toCreatePayload(
- model *resourceModel,
- roles []string,
-) (*sqlserverflexbeta.CreateUserRequestPayload, error) {
- if model == nil {
- return nil, fmt.Errorf("nil model")
- }
-
- pl := sqlserverflexbeta.CreateUserRequestPayload{
- Username: conversion.StringValueToPointer(model.Username),
- Roles: &roles,
- }
- slices.Sort(roles)
- if !model.DefaultDatabase.IsNull() || !model.DefaultDatabase.IsUnknown() {
- pl.DefaultDatabase = conversion.StringValueToPointer(model.DefaultDatabase)
- }
-
- return &pl, nil
-}
diff --git a/stackit/internal/services/sqlserverflexbeta/user/mapper_test.go b/stackit/internal/services/sqlserverflexbeta/user/mapper_test.go
deleted file mode 100644
index c0e09bda..00000000
--- a/stackit/internal/services/sqlserverflexbeta/user/mapper_test.go
+++ /dev/null
@@ -1,527 +0,0 @@
-package sqlserverflexbeta
-
-import (
- "testing"
-
- "github.com/google/go-cmp/cmp"
- "github.com/hashicorp/terraform-plugin-framework/attr"
- "github.com/hashicorp/terraform-plugin-framework/types"
- "github.com/stackitcloud/stackit-sdk-go/core/utils"
-
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta"
-)
-
-func TestMapDataSourceFields(t *testing.T) {
- const testRegion = "region"
- tests := []struct {
- description string
- input *sqlserverflexbeta.GetUserResponse
- region string
- expected dataSourceModel
- isValid bool
- }{
- {
- "default_values",
- &sqlserverflexbeta.GetUserResponse{},
- testRegion,
- dataSourceModel{
- Id: types.StringValue("pid,region,iid,1"),
- UserId: types.Int64Value(1),
- InstanceId: types.StringValue("iid"),
- ProjectId: types.StringValue("pid"),
- Username: types.StringNull(),
- Roles: types.List(types.SetNull(types.StringType)),
- Host: types.StringNull(),
- Port: types.Int64Null(),
- Region: types.StringValue(testRegion),
- Status: types.StringNull(),
- DefaultDatabase: types.StringNull(),
- },
- true,
- },
- {
- "simple_values",
- &sqlserverflexbeta.GetUserResponse{
- Roles: &[]string{
- "role_1",
- "role_2",
- "",
- },
- Username: utils.Ptr("username"),
- Host: utils.Ptr("host"),
- Port: utils.Ptr(int64(1234)),
- Status: utils.Ptr("active"),
- DefaultDatabase: utils.Ptr("default_db"),
- },
- testRegion,
- dataSourceModel{
- Id: types.StringValue("pid,region,iid,1"),
- UserId: types.Int64Value(1),
- InstanceId: types.StringValue("iid"),
- ProjectId: types.StringValue("pid"),
- Username: types.StringValue("username"),
- Roles: types.List(
- types.SetValueMust(
- types.StringType, []attr.Value{
- types.StringValue(""),
- types.StringValue("role_1"),
- types.StringValue("role_2"),
- },
- ),
- ),
- Host: types.StringValue("host"),
- Port: types.Int64Value(1234),
- Region: types.StringValue(testRegion),
- Status: types.StringValue("active"),
- DefaultDatabase: types.StringValue("default_db"),
- },
- true,
- },
- {
- "null_fields_and_int_conversions",
- &sqlserverflexbeta.GetUserResponse{
- Id: utils.Ptr(int64(1)),
- Roles: &[]string{},
- Username: nil,
- Host: nil,
- Port: utils.Ptr(int64(2123456789)),
- },
- testRegion,
- dataSourceModel{
- Id: types.StringValue("pid,region,iid,1"),
- UserId: types.Int64Value(1),
- InstanceId: types.StringValue("iid"),
- ProjectId: types.StringValue("pid"),
- Username: types.StringNull(),
- Roles: types.List(types.SetValueMust(types.StringType, []attr.Value{})),
- Host: types.StringNull(),
- Port: types.Int64Value(2123456789),
- Region: types.StringValue(testRegion),
- },
- true,
- },
- {
- "nil_response",
- nil,
- testRegion,
- dataSourceModel{},
- false,
- },
- {
- "nil_response_2",
- &sqlserverflexbeta.GetUserResponse{},
- testRegion,
- dataSourceModel{},
- false,
- },
- {
- "no_resource_id",
- &sqlserverflexbeta.GetUserResponse{},
- testRegion,
- dataSourceModel{},
- false,
- },
- }
- for _, tt := range tests {
- t.Run(
- tt.description, func(t *testing.T) {
- state := &dataSourceModel{
- ProjectId: tt.expected.ProjectId,
- InstanceId: tt.expected.InstanceId,
- UserId: tt.expected.UserId,
- }
- err := mapDataSourceFields(tt.input, state, tt.region)
- if !tt.isValid && err == nil {
- t.Fatalf("Should have failed")
- }
- if tt.isValid && err != nil {
- t.Fatalf("Should not have failed: %v", err)
- }
- if tt.isValid {
- diff := cmp.Diff(&tt.expected, state)
- if diff != "" {
- t.Fatalf("Data does not match: %s", diff)
- }
- }
- },
- )
- }
-}
-
-func TestMapFieldsCreate(t *testing.T) {
- const testRegion = "region"
- tests := []struct {
- description string
- input *sqlserverflexbeta.CreateUserResponse
- region string
- expected resourceModel
- isValid bool
- }{
- {
- "default_values",
- &sqlserverflexbeta.CreateUserResponse{
- Id: utils.Ptr(int64(1)),
- Password: utils.Ptr(""),
- },
- testRegion,
- resourceModel{
- Id: types.Int64Value(1),
- UserId: types.Int64Value(1),
- InstanceId: types.StringValue("iid"),
- ProjectId: types.StringValue("pid"),
- Username: types.StringNull(),
- Roles: types.List(types.SetNull(types.StringType)),
- Password: types.StringValue(""),
- Host: types.StringNull(),
- Port: types.Int64Null(),
- Region: types.StringValue(testRegion),
- },
- true,
- },
- {
- "simple_values",
- &sqlserverflexbeta.CreateUserResponse{
- Id: utils.Ptr(int64(2)),
- Roles: &[]string{
- "role_1",
- "role_2",
- "",
- },
- Username: utils.Ptr("username"),
- Password: utils.Ptr("password"),
- Host: utils.Ptr("host"),
- Port: utils.Ptr(int64(1234)),
- Status: utils.Ptr("status"),
- DefaultDatabase: utils.Ptr("default_db"),
- },
- testRegion,
- resourceModel{
- Id: types.Int64Value(2),
- UserId: types.Int64Value(2),
- InstanceId: types.StringValue("iid"),
- ProjectId: types.StringValue("pid"),
- Username: types.StringValue("username"),
- Roles: types.List(
- types.SetValueMust(
- types.StringType, []attr.Value{
- types.StringValue(""),
- types.StringValue("role_1"),
- types.StringValue("role_2"),
- },
- ),
- ),
- Password: types.StringValue("password"),
- Host: types.StringValue("host"),
- Port: types.Int64Value(1234),
- Region: types.StringValue(testRegion),
- Status: types.StringValue("status"),
- DefaultDatabase: types.StringValue("default_db"),
- },
- true,
- },
- {
- "null_fields_and_int_conversions",
- &sqlserverflexbeta.CreateUserResponse{
- Id: utils.Ptr(int64(3)),
- Roles: &[]string{},
- Username: nil,
- Password: utils.Ptr(""),
- Host: nil,
- Port: utils.Ptr(int64(2123456789)),
- },
- testRegion,
- resourceModel{
- Id: types.Int64Value(3),
- UserId: types.Int64Value(3),
- InstanceId: types.StringValue("iid"),
- ProjectId: types.StringValue("pid"),
- Username: types.StringNull(),
- Roles: types.List(types.SetValueMust(types.StringType, []attr.Value{})),
- Password: types.StringValue(""),
- Host: types.StringNull(),
- Port: types.Int64Value(2123456789),
- Region: types.StringValue(testRegion),
- DefaultDatabase: types.StringNull(),
- Status: types.StringNull(),
- },
- true,
- },
- {
- "nil_response",
- nil,
- testRegion,
- resourceModel{},
- false,
- },
- {
- "nil_response_2",
- &sqlserverflexbeta.CreateUserResponse{},
- testRegion,
- resourceModel{},
- false,
- },
- {
- "no_resource_id",
- &sqlserverflexbeta.CreateUserResponse{},
- testRegion,
- resourceModel{},
- false,
- },
- {
- "no_password",
- &sqlserverflexbeta.CreateUserResponse{
- Id: utils.Ptr(int64(1)),
- },
- testRegion,
- resourceModel{},
- false,
- },
- }
- for _, tt := range tests {
- t.Run(
- tt.description, func(t *testing.T) {
- state := &resourceModel{
- ProjectId: tt.expected.ProjectId,
- InstanceId: tt.expected.InstanceId,
- }
- err := mapFieldsCreate(tt.input, state, tt.region)
- if !tt.isValid && err == nil {
- t.Fatalf("Should have failed")
- }
- if tt.isValid && err != nil {
- t.Fatalf("Should not have failed: %v", err)
- }
- if tt.isValid {
- diff := cmp.Diff(&tt.expected, state)
- if diff != "" {
- t.Fatalf("Data does not match: %s", diff)
- }
- }
- },
- )
- }
-}
-
-func TestMapFields(t *testing.T) {
- const testRegion = "region"
- tests := []struct {
- description string
- input *sqlserverflexbeta.GetUserResponse
- region string
- expected resourceModel
- isValid bool
- }{
- {
- "default_values",
- &sqlserverflexbeta.GetUserResponse{},
- testRegion,
- resourceModel{
- Id: types.Int64Value(1),
- UserId: types.Int64Value(1),
- InstanceId: types.StringValue("iid"),
- ProjectId: types.StringValue("pid"),
- Username: types.StringNull(),
- Roles: types.List(types.SetNull(types.StringType)),
- Host: types.StringNull(),
- Port: types.Int64Null(),
- Region: types.StringValue(testRegion),
- },
- true,
- },
- {
- "simple_values",
- &sqlserverflexbeta.GetUserResponse{
- Roles: &[]string{
- "role_2",
- "role_1",
- "",
- },
- Username: utils.Ptr("username"),
- Host: utils.Ptr("host"),
- Port: utils.Ptr(int64(1234)),
- },
- testRegion,
- resourceModel{
- Id: types.Int64Value(2),
- UserId: types.Int64Value(2),
- InstanceId: types.StringValue("iid"),
- ProjectId: types.StringValue("pid"),
- Username: types.StringValue("username"),
- Roles: types.List(
- types.SetValueMust(
- types.StringType, []attr.Value{
- types.StringValue(""),
- types.StringValue("role_1"),
- types.StringValue("role_2"),
- },
- ),
- ),
- Host: types.StringValue("host"),
- Port: types.Int64Value(1234),
- Region: types.StringValue(testRegion),
- },
- true,
- },
- {
- "null_fields_and_int_conversions",
- &sqlserverflexbeta.GetUserResponse{
- Id: utils.Ptr(int64(1)),
- Roles: &[]string{},
- Username: nil,
- Host: nil,
- Port: utils.Ptr(int64(2123456789)),
- },
- testRegion,
- resourceModel{
- Id: types.Int64Value(1),
- UserId: types.Int64Value(1),
- InstanceId: types.StringValue("iid"),
- ProjectId: types.StringValue("pid"),
- Username: types.StringNull(),
- Roles: types.List(types.SetValueMust(types.StringType, []attr.Value{})),
- Host: types.StringNull(),
- Port: types.Int64Value(2123456789),
- Region: types.StringValue(testRegion),
- },
- true,
- },
- {
- "nil_response",
- nil,
- testRegion,
- resourceModel{},
- false,
- },
- {
- "nil_response_2",
- &sqlserverflexbeta.GetUserResponse{},
- testRegion,
- resourceModel{},
- false,
- },
- {
- "no_resource_id",
- &sqlserverflexbeta.GetUserResponse{},
- testRegion,
- resourceModel{},
- false,
- },
- }
- for _, tt := range tests {
- t.Run(
- tt.description, func(t *testing.T) {
- state := &resourceModel{
- ProjectId: tt.expected.ProjectId,
- InstanceId: tt.expected.InstanceId,
- UserId: tt.expected.UserId,
- }
- err := mapFields(tt.input, state, tt.region)
- if !tt.isValid && err == nil {
- t.Fatalf("Should have failed")
- }
- if tt.isValid && err != nil {
- t.Fatalf("Should not have failed: %v", err)
- }
- if tt.isValid {
- diff := cmp.Diff(&tt.expected, state)
- if diff != "" {
- t.Fatalf("Data does not match: %s", diff)
- }
- }
- },
- )
- }
-}
-
-func TestToCreatePayload(t *testing.T) {
- tests := []struct {
- description string
- input *resourceModel
- inputRoles []string
- expected *sqlserverflexbeta.CreateUserRequestPayload
- isValid bool
- }{
- {
- "default_values",
- &resourceModel{},
- []string{},
- &sqlserverflexbeta.CreateUserRequestPayload{
- Roles: &[]string{},
- Username: nil,
- },
- true,
- },
- {
- "default_values",
- &resourceModel{
- Username: types.StringValue("username"),
- },
- []string{
- "role_1",
- "role_2",
- },
- &sqlserverflexbeta.CreateUserRequestPayload{
- Roles: &[]string{
- "role_1",
- "role_2",
- },
- Username: utils.Ptr("username"),
- },
- true,
- },
- {
- "null_fields_and_int_conversions",
- &resourceModel{
- Username: types.StringNull(),
- },
- []string{
- "",
- },
- &sqlserverflexbeta.CreateUserRequestPayload{
- Roles: &[]string{
- "",
- },
- Username: nil,
- },
- true,
- },
- {
- "nil_model",
- nil,
- []string{},
- nil,
- false,
- },
- {
- "nil_roles",
- &resourceModel{
- Username: types.StringValue("username"),
- },
- []string{},
- &sqlserverflexbeta.CreateUserRequestPayload{
- Roles: &[]string{},
- Username: utils.Ptr("username"),
- },
- true,
- },
- }
- for _, tt := range tests {
- t.Run(
- tt.description, func(t *testing.T) {
- output, err := toCreatePayload(tt.input, tt.inputRoles)
- if !tt.isValid && err == nil {
- t.Fatalf("Should have failed")
- }
- if tt.isValid && err != nil {
- t.Fatalf("Should not have failed: %v", err)
- }
- if tt.isValid {
- diff := cmp.Diff(output, tt.expected)
- if diff != "" {
- t.Fatalf("Data does not match: %s", diff)
- }
- }
- },
- )
- }
-}
diff --git a/stackit/internal/services/sqlserverflexbeta/user/planModifiers.yaml b/stackit/internal/services/sqlserverflexbeta/user/planModifiers.yaml
deleted file mode 100644
index 43b029e8..00000000
--- a/stackit/internal/services/sqlserverflexbeta/user/planModifiers.yaml
+++ /dev/null
@@ -1,53 +0,0 @@
-fields:
- - name: 'id'
- modifiers:
- - 'UseStateForUnknown'
- - 'RequiresReplace'
-
- - name: 'instance_id'
- validators:
- - validate.NoSeparator
- - validate.UUID
- modifiers:
- - 'UseStateForUnknown'
- - 'RequiresReplace'
-
- - name: 'project_id'
- validators:
- - validate.NoSeparator
- - validate.UUID
- modifiers:
- - 'UseStateForUnknown'
- - 'RequiresReplace'
-
- - name: 'region'
- modifiers:
- - 'RequiresReplace'
- - 'RequiresReplace'
-
- - name: 'user_id'
- modifiers:
- - 'UseStateForUnknown'
- - 'RequiresReplace'
-
- - name: 'username'
- modifiers:
- - 'UseStateForUnknown'
- - 'RequiresReplace'
-
- - name: 'roles'
- modifiers:
- - 'UseStateForUnknown'
- - 'RequiresReplace'
-
- - name: 'password'
- modifiers:
- - 'UseStateForUnknown'
-
- - name: 'uri'
- modifiers:
- - 'UseStateForUnknown'
-
- - name: 'status'
- modifiers:
- - 'UseStateForUnknown'
diff --git a/stackit/internal/services/sqlserverflexbeta/user/resource.go b/stackit/internal/services/sqlserverflexbeta/user/resource.go
deleted file mode 100644
index efaf3fc1..00000000
--- a/stackit/internal/services/sqlserverflexbeta/user/resource.go
+++ /dev/null
@@ -1,577 +0,0 @@
-package sqlserverflexbeta
-
-import (
- "context"
- _ "embed"
- "errors"
- "fmt"
- "net/http"
- "slices"
- "strconv"
- "strings"
- "time"
-
- "github.com/hashicorp/terraform-plugin-framework/path"
- "github.com/hashicorp/terraform-plugin-framework/resource"
- "github.com/hashicorp/terraform-plugin-framework/resource/identityschema"
- "github.com/hashicorp/terraform-plugin-framework/types"
- "github.com/hashicorp/terraform-plugin-log/tflog"
- "github.com/stackitcloud/stackit-sdk-go/core/oapierror"
-
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
- sqlserverflexbetaUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/utils"
- sqlserverflexbetaWait "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/wait/sqlserverflexbeta"
-
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
-
- sqlserverflexbetaResGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/user/resources_gen"
-)
-
-var (
- _ resource.Resource = &userResource{}
- _ resource.ResourceWithConfigure = &userResource{}
- _ resource.ResourceWithImportState = &userResource{}
- _ resource.ResourceWithModifyPlan = &userResource{}
- _ resource.ResourceWithIdentity = &userResource{}
- _ resource.ResourceWithValidateConfig = &userResource{}
-)
-
-func NewUserResource() resource.Resource {
- return &userResource{}
-}
-
-// resourceModel describes the resource data model.
-type resourceModel = sqlserverflexbetaResGen.UserModel
-
-// UserResourceIdentityModel describes the resource's identity attributes.
-type UserResourceIdentityModel struct {
- ProjectID types.String `tfsdk:"project_id"`
- Region types.String `tfsdk:"region"`
- InstanceID types.String `tfsdk:"instance_id"`
- UserID types.Int64 `tfsdk:"user_id"`
-}
-
-type userResource struct {
- client *sqlserverflexbeta.APIClient
- providerData core.ProviderData
-}
-
-func (r *userResource) Metadata(_ context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
- resp.TypeName = req.ProviderTypeName + "_sqlserverflexbeta_user"
-}
-
-// Configure adds the provider configured client to the resource.
-func (r *userResource) Configure(ctx context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) {
- var ok bool
- r.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics)
- if !ok {
- return
- }
-
- apiClient := sqlserverflexbetaUtils.ConfigureClient(ctx, &r.providerData, &resp.Diagnostics)
- if resp.Diagnostics.HasError() {
- return
- }
- r.client = apiClient
- tflog.Info(ctx, "SQLServer Beta Flex user client configured")
-}
-
-// ModifyPlan implements resource.ResourceWithModifyPlan.
-// Use the modifier to set the effective region in the current plan.
-func (r *userResource) ModifyPlan(
- ctx context.Context,
- req resource.ModifyPlanRequest,
- resp *resource.ModifyPlanResponse,
-) { // nolint:gocritic // function signature required by Terraform
- var configModel resourceModel
- // skip initial empty configuration to avoid follow-up errors
- if req.Config.Raw.IsNull() {
- return
- }
- resp.Diagnostics.Append(req.Config.Get(ctx, &configModel)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- var planModel resourceModel
- resp.Diagnostics.Append(req.Plan.Get(ctx, &planModel)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- utils.AdaptRegion(ctx, configModel.Region, &planModel.Region, r.providerData.GetRegion(), resp)
- if resp.Diagnostics.HasError() {
- return
- }
-
- //// TODO: verify if this is needed - START
- // var planRoles []string
- // diags := planModel.Roles.ElementsAs(ctx, &planRoles, false)
- // resp.Diagnostics.Append(diags...)
- // if diags.HasError() {
- // return
- //}
- // slices.Sort(planRoles)
- // var roles []attr.Value
- // for _, role := range planRoles {
- // roles = append(roles, types.StringValue(string(role)))
- //}
- // rolesSet, diags := types.ListValue(types.StringType, roles)
- // resp.Diagnostics.Append(diags...)
- // if diags.HasError() {
- // return
- //}
- // planModel.Roles = rolesSet
- //// TODO: verify if this is needed - END
-
- resp.Diagnostics.Append(resp.Plan.Set(ctx, planModel)...)
- if resp.Diagnostics.HasError() {
- return
- }
-}
-
-//go:embed planModifiers.yaml
-var modifiersFileByte []byte
-
-// Schema defines the schema for the resource.
-func (r *userResource) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) {
- s := sqlserverflexbetaResGen.UserResourceSchema(ctx)
-
- fields, err := utils.ReadModifiersConfig(modifiersFileByte)
- if err != nil {
- resp.Diagnostics.AddError("error during read modifiers config file", err.Error())
- return
- }
-
- err = utils.AddPlanModifiersToResourceSchema(fields, &s)
- if err != nil {
- resp.Diagnostics.AddError("error adding plan modifiers", err.Error())
- return
- }
- resp.Schema = s
-}
-
-// IdentitySchema defines the schema for the resource's identity attributes.
-func (r *userResource) IdentitySchema(
- _ context.Context,
- _ resource.IdentitySchemaRequest,
- response *resource.IdentitySchemaResponse,
-) {
- response.IdentitySchema = identityschema.Schema{
- Attributes: map[string]identityschema.Attribute{
- "project_id": identityschema.StringAttribute{
- RequiredForImport: true, // must be set during import by the practitioner
- },
- "region": identityschema.StringAttribute{
- RequiredForImport: true, // can be defaulted by the provider configuration
- },
- "instance_id": identityschema.StringAttribute{
- RequiredForImport: true, // can be defaulted by the provider configuration
- },
- "user_id": identityschema.Int64Attribute{
- RequiredForImport: true, // can be defaulted by the provider configuration
- },
- },
- }
-}
-
-func (r *userResource) ValidateConfig(
- ctx context.Context,
- req resource.ValidateConfigRequest,
- resp *resource.ValidateConfigResponse,
-) {
- var data resourceModel
-
- resp.Diagnostics.Append(req.Config.Get(ctx, &data)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- var roles []string
- diags := data.Roles.ElementsAs(ctx, &roles, false)
- resp.Diagnostics.Append(diags...)
- if diags.HasError() {
- return
- }
-
- var resRoles []string
- for _, role := range roles {
- if slices.Contains(resRoles, role) {
- resp.Diagnostics.AddAttributeError(
- path.Root("roles"),
- "Attribute Configuration Error",
- "defined roles MUST NOT contain duplicates",
- )
- return
- }
- resRoles = append(resRoles, role)
- }
-}
-
-// Create creates the resource and sets the initial Terraform state.
-func (r *userResource) Create(
- ctx context.Context,
- req resource.CreateRequest,
- resp *resource.CreateResponse,
-) { // nolint:gocritic // function signature required by Terraform
- var model resourceModel
- diags := req.Plan.Get(ctx, &model)
- resp.Diagnostics.Append(diags...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- ctx = core.InitProviderContext(ctx)
-
- projectId := model.ProjectId.ValueString()
- instanceId := model.InstanceId.ValueString()
- region := model.Region.ValueString()
-
- ctx = tflog.SetField(ctx, "project_id", projectId)
- ctx = tflog.SetField(ctx, "instance_id", instanceId)
- ctx = tflog.SetField(ctx, "region", region)
-
- var roles []string
- if !model.Roles.IsNull() && !model.Roles.IsUnknown() {
- diags = model.Roles.ElementsAs(ctx, &roles, false)
- resp.Diagnostics.Append(diags...)
- if resp.Diagnostics.HasError() {
- return
- }
- slices.Sort(roles)
- }
-
- // Generate API request body from model
- payload, err := toCreatePayload(&model, roles)
- if err != nil {
- core.LogAndAddError(ctx, &resp.Diagnostics, "Error creating user", fmt.Sprintf("Creating API payload: %v", err))
- return
- }
- // Create new user
- userResp, err := r.client.CreateUserRequest(
- ctx,
- projectId,
- region,
- instanceId,
- ).CreateUserRequestPayload(*payload).Execute()
- if err != nil {
- core.LogAndAddError(ctx, &resp.Diagnostics, "Error creating user", fmt.Sprintf("Calling API: %v", err))
- return
- }
-
- ctx = core.LogResponse(ctx)
-
- if userResp == nil || userResp.Id == nil || *userResp.Id == 0 {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "Error creating user",
- "API didn't return user Id. A user might have been created",
- )
- return
- }
-
- userId := *userResp.Id
- ctx = tflog.SetField(ctx, "user_id", userId)
-
- // Set data returned by API in identity
- identity := UserResourceIdentityModel{
- ProjectID: types.StringValue(projectId),
- Region: types.StringValue(region),
- InstanceID: types.StringValue(instanceId),
- UserID: types.Int64Value(userId),
- }
- resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- err = mapFieldsCreate(userResp, &model, region)
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "Error creating user",
- fmt.Sprintf("Processing API payload: %v", err),
- )
- return
- }
-
- waitResp, err := sqlserverflexbetaWait.CreateUserWaitHandler(
- ctx,
- r.client,
- projectId,
- instanceId,
- region,
- userId,
- ).SetSleepBeforeWait(
- 90 * time.Second,
- ).SetTimeout(
- 90 * time.Minute,
- ).WaitWithContext(ctx)
-
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "create user",
- fmt.Sprintf("Instance creation waiting: %v", err),
- )
- return
- }
-
- if waitResp.Id == nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "create user",
- "Instance creation waiting: returned id is nil",
- )
- return
- }
-
- // Map response body to schema
- err = mapFields(waitResp, &model, region)
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "Error creating user",
- fmt.Sprintf("Processing API payload: %v", err),
- )
- return
- }
- // Set state to fully populated data
- diags = resp.State.Set(ctx, model)
- resp.Diagnostics.Append(diags...)
- if resp.Diagnostics.HasError() {
- return
- }
- tflog.Info(ctx, "SQLServer Flex user created")
-}
-
-// Read refreshes the Terraform state with the latest data.
-func (r *userResource) Read(
- ctx context.Context,
- req resource.ReadRequest,
- resp *resource.ReadResponse,
-) { // nolint:gocritic // function signature required by Terraform
- var model resourceModel
- diags := req.State.Get(ctx, &model)
- resp.Diagnostics.Append(diags...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- ctx = core.InitProviderContext(ctx)
-
- projectId := model.ProjectId.ValueString()
- instanceId := model.InstanceId.ValueString()
- userId := model.UserId.ValueInt64()
- region := r.providerData.GetRegionWithOverride(model.Region)
- ctx = tflog.SetField(ctx, "project_id", projectId)
- ctx = tflog.SetField(ctx, "instance_id", instanceId)
- ctx = tflog.SetField(ctx, "user_id", userId)
- ctx = tflog.SetField(ctx, "region", region)
-
- recordSetResp, err := r.client.GetUserRequest(ctx, projectId, region, instanceId, userId).Execute()
- if err != nil {
- var oapiErr *oapierror.GenericOpenAPIError
- ok := errors.As(
- err,
- &oapiErr,
- )
- //nolint:errorlint //complaining that error.As should be used to catch wrapped errors, but this error should not be wrapped
- if ok && oapiErr.StatusCode == http.StatusNotFound {
- resp.State.RemoveResource(ctx)
- return
- }
- core.LogAndAddError(ctx, &resp.Diagnostics, "Error reading user", fmt.Sprintf("Calling API: %v", err))
- return
- }
-
- ctx = core.LogResponse(ctx)
-
- // Map response body to schema
- err = mapFields(recordSetResp, &model, region)
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "Error reading user",
- fmt.Sprintf("Processing API payload: %v", err),
- )
- return
- }
-
- // Set data returned by API in identity
- identity := UserResourceIdentityModel{
- ProjectID: types.StringValue(projectId),
- Region: types.StringValue(region),
- InstanceID: types.StringValue(instanceId),
- UserID: types.Int64Value(userId),
- }
- resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- // Set refreshed state
- diags = resp.State.Set(ctx, model)
- resp.Diagnostics.Append(diags...)
- if resp.Diagnostics.HasError() {
- return
- }
- tflog.Info(ctx, "SQLServer Flex user read")
-}
-
-// Update updates the resource and sets the updated Terraform state on success.
-func (r *userResource) Update(
- ctx context.Context,
- _ resource.UpdateRequest,
- resp *resource.UpdateResponse,
-) { // nolint:gocritic // function signature required by Terraform
- // Update shouldn't be called
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "Error updating user",
- "an SQL server user can not be updated, only created",
- )
-}
-
-// Delete deletes the resource and removes the Terraform state on success.
-func (r *userResource) Delete(
- ctx context.Context,
- req resource.DeleteRequest,
- resp *resource.DeleteResponse,
-) { // nolint:gocritic // function signature required by Terraform
- // Retrieve values from plan
- var model resourceModel
- diags := req.State.Get(ctx, &model)
- resp.Diagnostics.Append(diags...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- ctx = core.InitProviderContext(ctx)
-
- projectId := model.ProjectId.ValueString()
- instanceId := model.InstanceId.ValueString()
- userId := model.UserId.ValueInt64()
- region := model.Region.ValueString()
- ctx = tflog.SetField(ctx, "project_id", projectId)
- ctx = tflog.SetField(ctx, "instance_id", instanceId)
- ctx = tflog.SetField(ctx, "user_id", userId)
- ctx = tflog.SetField(ctx, "region", region)
-
- // Delete existing record set
- // err := r.client.DeleteUserRequest(ctx, projectId, region, instanceId, userId).Execute()
- err := r.client.DeleteUserRequestExecute(ctx, projectId, region, instanceId, userId)
- if err != nil {
- var oapiErr *oapierror.GenericOpenAPIError
- ok := errors.As(err, &oapiErr)
- if !ok {
- // TODO err handling
- return
- }
-
- switch oapiErr.StatusCode {
- case http.StatusNotFound:
- resp.State.RemoveResource(ctx)
- return
- // case http.StatusInternalServerError:
- // tflog.Warn(ctx, "[delete user] Wait handler got error 500")
- // return false, nil, nil
- default:
- // TODO err handling
- return
- }
- }
- // Delete existing record set
- _, err = sqlserverflexbetaWait.DeleteUserWaitHandler(ctx, r.client, projectId, region, instanceId, userId).
- WaitWithContext(ctx)
- if err != nil {
- core.LogAndAddError(ctx, &resp.Diagnostics, "User Delete Error", fmt.Sprintf("Calling API: %v", err))
- return
- }
-
- ctx = core.LogResponse(ctx)
-
- resp.State.RemoveResource(ctx)
-
- tflog.Info(ctx, "SQLServer Flex user deleted")
-}
-
-// ImportState imports a resource into the Terraform state on success.
-// The expected format of the resource import identifier is: project_id,zone_id,record_set_id
-func (r *userResource) ImportState(
- ctx context.Context,
- req resource.ImportStateRequest,
- resp *resource.ImportStateResponse,
-) {
- ctx = core.InitProviderContext(ctx)
-
- if req.ID != "" {
- idParts := strings.Split(req.ID, core.Separator)
-
- if len(idParts) != 4 || idParts[0] == "" || idParts[1] == "" || idParts[2] == "" || idParts[3] == "" {
- core.LogAndAddError(
- ctx, &resp.Diagnostics,
- "Error importing user",
- fmt.Sprintf(
- "Expected import identifier with format [project_id],[region],[instance_id],[user_id], got %q",
- req.ID,
- ),
- )
- return
- }
-
- userId, err := strconv.ParseInt(idParts[3], 10, 64)
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "Error importing user",
- fmt.Sprintf("Invalid user_id format: %q. It must be a valid integer.", idParts[3]),
- )
- return
- }
-
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), idParts[0])...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), idParts[1])...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), idParts[2])...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("user_id"), userId)...)
-
- tflog.Info(ctx, "SQLServer Flex user state imported")
-
- return
- }
-
- // If no ID is provided, attempt to read identity attributes from the import configuration
- var identityData UserResourceIdentityModel
- resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- projectId := identityData.ProjectID.ValueString()
- region := identityData.Region.ValueString()
- instanceId := identityData.InstanceID.ValueString()
- userId := identityData.UserID.ValueInt64()
-
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), projectId)...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), region)...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), instanceId)...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("user_id"), userId)...)
-
- core.LogAndAddWarning(
- ctx,
- &resp.Diagnostics,
- "SQLServer Flex user imported with empty password",
- "The user password is not imported as it is only available upon creation of a new user. The password field will be empty.",
- )
- tflog.Info(ctx, "SQLServer Flex user state imported")
-}
diff --git a/stackit/internal/services/sqlserverflexbeta/utils/util.go b/stackit/internal/services/sqlserverflexbeta/utils/util.go
deleted file mode 100644
index d8ba984b..00000000
--- a/stackit/internal/services/sqlserverflexbeta/utils/util.go
+++ /dev/null
@@ -1,48 +0,0 @@
-package utils
-
-import (
- "context"
- "fmt"
-
- sqlserverflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta"
-
- "github.com/hashicorp/terraform-plugin-framework/diag"
- "github.com/stackitcloud/stackit-sdk-go/core/config"
-
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
-)
-
-func ConfigureClient(
- ctx context.Context,
- providerData *core.ProviderData,
- diags *diag.Diagnostics,
-) *sqlserverflex.APIClient {
- apiClientConfigOptions := []config.ConfigurationOption{
- config.WithCustomAuth(providerData.RoundTripper),
- utils.UserAgentConfigOption(providerData.Version),
- }
- if providerData.SQLServerFlexCustomEndpoint != "" {
- apiClientConfigOptions = append(
- apiClientConfigOptions,
- config.WithEndpoint(providerData.SQLServerFlexCustomEndpoint),
- )
- } else {
- apiClientConfigOptions = append(apiClientConfigOptions, config.WithRegion(providerData.GetRegion()))
- }
- apiClient, err := sqlserverflex.NewAPIClient(apiClientConfigOptions...)
- if err != nil {
- core.LogAndAddError(
- ctx,
- diags,
- "Error configuring API client",
- fmt.Sprintf(
- "Configuring client: %v. This is an error related to the provider configuration, not to the resource configuration",
- err,
- ),
- )
- return nil
- }
-
- return apiClient
-}
diff --git a/stackit/internal/services/sqlserverflexbeta/utils/util_test.go b/stackit/internal/services/sqlserverflexbeta/utils/util_test.go
deleted file mode 100644
index 92fb1ae9..00000000
--- a/stackit/internal/services/sqlserverflexbeta/utils/util_test.go
+++ /dev/null
@@ -1,98 +0,0 @@
-package utils
-
-import (
- "context"
- "os"
- "reflect"
- "testing"
-
- "github.com/hashicorp/terraform-plugin-framework/diag"
- sdkClients "github.com/stackitcloud/stackit-sdk-go/core/clients"
- "github.com/stackitcloud/stackit-sdk-go/core/config"
-
- sqlserverflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta"
-
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
-)
-
-const (
- testVersion = "1.2.3"
- testCustomEndpoint = "https://sqlserverflex-custom-endpoint.api.stackit.cloud"
-)
-
-func TestConfigureClient(t *testing.T) {
- /* mock authentication by setting service account token env variable */
- os.Clearenv()
- err := os.Setenv(sdkClients.ServiceAccountToken, "mock-val")
- if err != nil {
- t.Errorf("error setting env variable: %v", err)
- }
-
- type args struct {
- providerData *core.ProviderData
- }
- tests := []struct {
- name string
- args args
- wantErr bool
- expected *sqlserverflex.APIClient
- }{
- {
- name: "default endpoint",
- args: args{
- providerData: &core.ProviderData{
- Version: testVersion,
- },
- },
- expected: func() *sqlserverflex.APIClient {
- apiClient, err := sqlserverflex.NewAPIClient(
- config.WithRegion("eu01"),
- utils.UserAgentConfigOption(testVersion),
- )
- if err != nil {
- t.Errorf("error configuring client: %v", err)
- }
- return apiClient
- }(),
- wantErr: false,
- },
- {
- name: "custom endpoint",
- args: args{
- providerData: &core.ProviderData{
- Version: testVersion,
- SQLServerFlexCustomEndpoint: testCustomEndpoint,
- },
- },
- expected: func() *sqlserverflex.APIClient {
- apiClient, err := sqlserverflex.NewAPIClient(
- utils.UserAgentConfigOption(testVersion),
- config.WithEndpoint(testCustomEndpoint),
- )
- if err != nil {
- t.Errorf("error configuring client: %v", err)
- }
- return apiClient
- }(),
- wantErr: false,
- },
- }
- for _, tt := range tests {
- t.Run(
- tt.name, func(t *testing.T) {
- ctx := context.Background()
- diags := diag.Diagnostics{}
-
- actual := ConfigureClient(ctx, tt.args.providerData, &diags)
- if diags.HasError() != tt.wantErr {
- t.Errorf("ConfigureClient() error = %v, want %v", diags.HasError(), tt.wantErr)
- }
-
- if !reflect.DeepEqual(actual, tt.expected) {
- t.Errorf("ConfigureClient() = %v, want %v", actual, tt.expected)
- }
- },
- )
- }
-}
diff --git a/stackit/internal/wait/postgresflexalpha/wait.go b/stackit/internal/wait/postgresflexalpha/wait.go
index 8d72f587..978b57a4 100644
--- a/stackit/internal/wait/postgresflexalpha/wait.go
+++ b/stackit/internal/wait/postgresflexalpha/wait.go
@@ -4,13 +4,11 @@ import (
"context"
"errors"
"fmt"
- "math"
"net/http"
"time"
"github.com/hashicorp/terraform-plugin-log/tflog"
-
- postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
+ "github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
"github.com/stackitcloud/stackit-sdk-go/core/oapierror"
"github.com/stackitcloud/stackit-sdk-go/core/wait"
@@ -30,64 +28,55 @@ const (
// APIClientInstanceInterface Interface needed for tests
type APIClientInstanceInterface interface {
- GetInstanceRequestExecute(ctx context.Context, projectId, region, instanceId string) (
- *postgresflex.GetInstanceResponse,
- error,
- )
+ GetInstanceRequest(ctx context.Context, projectId, region, instanceId string) v3alpha1api.ApiGetInstanceRequestRequest
- ListUsersRequestExecute(
+ ListUsersRequest(
ctx context.Context,
projectId string,
region string,
instanceId string,
- ) (*postgresflex.ListUserResponse, error)
+ ) v3alpha1api.ApiListUsersRequestRequest
}
// APIClientUserInterface Interface needed for tests
type APIClientUserInterface interface {
- GetUserRequestExecute(ctx context.Context, projectId, region, instanceId string, userId int32) (
- *postgresflex.GetUserResponse,
- error,
- )
+ GetUserRequest(ctx context.Context, projectId, region, instanceId string, userId int32) v3alpha1api.ApiGetUserRequestRequest
+}
- GetDatabaseRequestExecute(
- ctx context.Context,
- projectId string,
- region string,
- instanceId string,
- databaseId int32,
- ) (*postgresflex.GetDatabaseResponse, error)
+// APIClientDatabaseInterface Interface needed for tests
+type APIClientDatabaseInterface interface {
+ GetDatabaseRequest(ctx context.Context, projectId string, region string, instanceId string, databaseId int32) v3alpha1api.ApiGetDatabaseRequestRequest
}
// CreateInstanceWaitHandler will wait for instance creation
func CreateInstanceWaitHandler(
ctx context.Context, a APIClientInstanceInterface, projectId, region,
instanceId string,
-) *wait.AsyncActionHandler[postgresflex.GetInstanceResponse] {
+) *wait.AsyncActionHandler[v3alpha1api.GetInstanceResponse] {
instanceCreated := false
- var instanceGetResponse *postgresflex.GetInstanceResponse
+ var instanceGetResponse *v3alpha1api.GetInstanceResponse
maxWait := time.Minute * 45
startTime := time.Now()
extendedTimeout := 0
handler := wait.New(
- func() (waitFinished bool, response *postgresflex.GetInstanceResponse, err error) {
+ func() (waitFinished bool, response *v3alpha1api.GetInstanceResponse, err error) {
if !instanceCreated {
- s, err := a.GetInstanceRequestExecute(ctx, projectId, region, instanceId)
+ s, err := a.GetInstanceRequest(ctx, projectId, region, instanceId).Execute()
if err != nil {
return false, nil, err
}
- if s == nil || s.Id == nil || *s.Id != instanceId || s.Status == nil {
+ if s == nil || s.Id != instanceId {
return false, nil, nil
}
tflog.Debug(
ctx, "waiting for instance ready", map[string]interface{}{
- "status": *s.Status,
+ "status": s.Status,
},
)
- switch *s.Status {
+ switch s.Status {
default:
- return true, s, fmt.Errorf("instance with id %s has unexpected status %s", instanceId, *s.Status)
+ return true, s, fmt.Errorf("instance with id %s has unexpected status %s", instanceId, s.Status)
case InstanceStateEmpty:
return false, nil, nil
case InstanceStatePending:
@@ -112,7 +101,7 @@ func CreateInstanceWaitHandler(
extendedTimeout++
if *s.Network.AccessScope == "SNA" {
ready := true
- if s.Network == nil || s.Network.InstanceAddress == nil {
+ if s.Network.InstanceAddress == nil {
tflog.Warn(ctx, "Waiting for instance_address")
ready = false
}
@@ -124,16 +113,12 @@ func CreateInstanceWaitHandler(
return false, nil, nil
}
}
- if s.IsDeletable == nil {
- tflog.Warn(ctx, "Waiting for is_deletable")
- return false, nil, nil
- }
}
instanceCreated = true
instanceGetResponse = s
case InstanceStateSuccess:
- if s.Network != nil && s.Network.AccessScope != nil && *s.Network.AccessScope == "SNA" {
+ if s.Network.AccessScope != nil && *s.Network.AccessScope == "SNA" {
if s.Network.InstanceAddress == nil {
tflog.Warn(ctx, "Waiting for instance_address")
return false, nil, nil
@@ -156,7 +141,7 @@ func CreateInstanceWaitHandler(
tflog.Info(ctx, "Waiting for instance (calling list users")
// // User operations aren't available right after an instance is deemed successful
// // To check if they are, perform a users request
- _, err = a.ListUsersRequestExecute(ctx, projectId, region, instanceId)
+ _, err = a.ListUsersRequest(ctx, projectId, region, instanceId).Execute()
if err == nil {
return true, instanceGetResponse, nil
}
@@ -175,7 +160,7 @@ func CreateInstanceWaitHandler(
},
)
// Sleep before wait is set because sometimes API returns 404 right after creation request
- handler.SetTimeout(90 * time.Minute).SetSleepBeforeWait(30 * time.Second)
+ // handler.SetTimeout(90 * time.Minute).SetSleepBeforeWait(30 * time.Second)
return handler
}
@@ -183,19 +168,19 @@ func CreateInstanceWaitHandler(
func PartialUpdateInstanceWaitHandler(
ctx context.Context, a APIClientInstanceInterface, projectId, region,
instanceId string,
-) *wait.AsyncActionHandler[postgresflex.GetInstanceResponse] {
+) *wait.AsyncActionHandler[v3alpha1api.GetInstanceResponse] {
handler := wait.New(
- func() (waitFinished bool, response *postgresflex.GetInstanceResponse, err error) {
- s, err := a.GetInstanceRequestExecute(ctx, projectId, region, instanceId)
+ func() (waitFinished bool, response *v3alpha1api.GetInstanceResponse, err error) {
+ s, err := a.GetInstanceRequest(ctx, projectId, region, instanceId).Execute()
if err != nil {
return false, nil, err
}
- if s == nil || s.Id == nil || *s.Id != instanceId || s.Status == nil {
+ if s == nil || s.Id != instanceId {
return false, nil, nil
}
- switch *s.Status {
+ switch s.Status {
default:
- return true, s, fmt.Errorf("instance with id %s has unexpected status %s", instanceId, *s.Status)
+ return true, s, fmt.Errorf("instance with id %s has unexpected status %s", instanceId, s.Status)
case InstanceStateEmpty:
return false, nil, nil
case InstanceStatePending:
@@ -213,7 +198,7 @@ func PartialUpdateInstanceWaitHandler(
}
},
)
- handler.SetTimeout(45 * time.Minute).SetSleepBeforeWait(30 * time.Second)
+ // handler.SetTimeout(45 * time.Minute).SetSleepBeforeWait(30 * time.Second)
return handler
}
@@ -222,15 +207,12 @@ func GetUserByIdWaitHandler(
ctx context.Context,
a APIClientUserInterface,
projectId, instanceId, region string,
- userId int64,
-) *wait.AsyncActionHandler[postgresflex.GetUserResponse] {
+ userId int32,
+) *wait.AsyncActionHandler[v3alpha1api.GetUserResponse] {
handler := wait.New(
- func() (waitFinished bool, response *postgresflex.GetUserResponse, err error) {
- if userId > math.MaxInt32 {
- return false, nil, fmt.Errorf("userId value is too big for int32")
- }
- userId32 := int32(userId) //nolint:gosec // we need to convert databaseId to int32 because API expects int32
- s, err := a.GetUserRequestExecute(ctx, projectId, region, instanceId, userId32)
+ func() (waitFinished bool, response *v3alpha1api.GetUserResponse, err error) {
+ userId32 := userId
+ s, err := a.GetUserRequest(ctx, projectId, region, instanceId, userId32).Execute()
if err != nil {
var oapiErr *oapierror.GenericOpenAPIError
ok := errors.As(err, &oapiErr)
@@ -259,14 +241,14 @@ func GetUserByIdWaitHandler(
// GetDatabaseByIdWaitHandler will wait for instance creation
func GetDatabaseByIdWaitHandler(
ctx context.Context,
- a APIClientUserInterface,
+ a APIClientDatabaseInterface,
projectId, instanceId, region string,
- databaseId int64,
-) *wait.AsyncActionHandler[postgresflex.GetDatabaseResponse] {
+ databaseId int32,
+) *wait.AsyncActionHandler[v3alpha1api.GetDatabaseResponse] {
handler := wait.New(
- func() (waitFinished bool, response *postgresflex.GetDatabaseResponse, err error) {
- dbId32 := int32(databaseId) //nolint:gosec // we need to convert databaseId to int32 because API expects int32
- s, err := a.GetDatabaseRequestExecute(ctx, projectId, region, instanceId, dbId32)
+ func() (waitFinished bool, response *v3alpha1api.GetDatabaseResponse, err error) {
+ dbId32 := databaseId
+ s, err := a.GetDatabaseRequest(ctx, projectId, region, instanceId, dbId32).Execute()
if err != nil {
var oapiErr *oapierror.GenericOpenAPIError
ok := errors.As(err, &oapiErr)
@@ -307,8 +289,8 @@ func DeleteInstanceWaitHandler(
timeout, sleepBeforeWait time.Duration,
) error {
handler := wait.New(
- func() (waitFinished bool, response *postgresflex.GetInstanceResponse, err error) {
- s, err := a.GetInstanceRequestExecute(ctx, projectId, region, instanceId)
+ func() (waitFinished bool, response *v3alpha1api.GetInstanceResponse, err error) {
+ s, err := a.GetInstanceRequest(ctx, projectId, region, instanceId).Execute()
if err != nil {
oapiErr, ok := err.(*oapierror.GenericOpenAPIError) // nolint:errorlint //complaining that error.As should be used to catch wrapped errors, but this error should not be wrapped
if !ok {
@@ -319,7 +301,7 @@ func DeleteInstanceWaitHandler(
}
return false, nil, fmt.Errorf("api returned error: %w", err)
}
- switch *s.Status {
+ switch s.Status {
case InstanceStateDeleted:
return true, nil, nil
case InstanceStateEmpty, InstanceStatePending, InstanceStateUnknown, InstanceStateProgressing, InstanceStateSuccess:
@@ -327,7 +309,7 @@ func DeleteInstanceWaitHandler(
case InstanceStateFailed:
return true, nil, fmt.Errorf("wait handler got status FAILURE for instance: %s", instanceId)
default:
- return true, s, fmt.Errorf("instance with id %s has unexpected status %s", instanceId, *s.Status)
+ return true, s, fmt.Errorf("instance with id %s has unexpected status %s", instanceId, s.Status)
}
},
).SetTimeout(timeout).SetSleepBeforeWait(sleepBeforeWait)
diff --git a/stackit/internal/wait/postgresflexalpha/wait_test.go b/stackit/internal/wait/postgresflexalpha/wait_test.go
index 57d36175..613a798d 100644
--- a/stackit/internal/wait/postgresflexalpha/wait_test.go
+++ b/stackit/internal/wait/postgresflexalpha/wait_test.go
@@ -11,7 +11,7 @@ import (
"github.com/stackitcloud/stackit-sdk-go/core/oapierror"
"github.com/stackitcloud/stackit-sdk-go/core/utils"
- postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
+ postgresflex "github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
)
// Used for testing instance operations
@@ -24,6 +24,13 @@ type apiClientInstanceMocked struct {
 usersGetErrorStatus int
}
+func (a *apiClientInstanceMocked) GetInstanceRequest(
+ _ context.Context,
+ _, _, _ string,
+) *postgresflex.ApiGetInstanceRequestRequest {
+ return nil
+}
+
func (a *apiClientInstanceMocked) GetInstanceRequestExecute(
_ context.Context,
_, _, _ string,
@@ -41,9 +47,9 @@ func (a *apiClientInstanceMocked) GetInstanceRequestExecute(
}
return &postgresflex.GetInstanceResponse{
- Id: &a.instanceId,
- Status: postgresflex.GetInstanceResponseGetStatusAttributeType(&a.instanceState),
- Network: postgresflex.GetInstanceResponseGetNetworkAttributeType(&a.instanceNetwork),
+ Id: a.instanceId,
+ Status: postgresflex.Status(a.instanceState),
+ Network: a.instanceNetwork,
}, nil
}
@@ -57,12 +63,12 @@ func (a *apiClientInstanceMocked) ListUsersRequestExecute(
}
}
- aux := int64(0)
+ aux := int32(0)
return &postgresflex.ListUserResponse{
- Pagination: &postgresflex.Pagination{
- TotalRows: &aux,
+ Pagination: postgresflex.Pagination{
+ TotalRows: aux,
},
- Users: &[]postgresflex.ListUser{},
+ Users: []postgresflex.ListUser{},
}, nil
}
@@ -88,9 +94,9 @@ func TestCreateInstanceWaitHandler(t *testing.T) {
},
wantErr: false,
wantRes: &postgresflex.GetInstanceResponse{
- Id: utils.Ptr("foo-bar"),
- Status: postgresflex.GetInstanceResponseGetStatusAttributeType(utils.Ptr(InstanceStateSuccess)),
- Network: &postgresflex.InstanceNetwork{
+ Id: "foo-bar",
+ Status: InstanceStateSuccess,
+ Network: postgresflex.InstanceNetwork{
AccessScope: nil,
Acl: nil,
InstanceAddress: utils.Ptr("10.0.0.1"),
@@ -157,9 +163,9 @@ func TestCreateInstanceWaitHandler(t *testing.T) {
usersGetErrorStatus: 400,
wantErr: true,
wantRes: &postgresflex.GetInstanceResponse{
- Id: utils.Ptr("foo-bar"),
- Status: postgresflex.GetInstanceResponseGetStatusAttributeType(utils.Ptr(InstanceStateSuccess)),
- Network: &postgresflex.InstanceNetwork{
+ Id: "foo-bar",
+ Status: InstanceStateSuccess,
+ Network: postgresflex.InstanceNetwork{
AccessScope: nil,
Acl: nil,
InstanceAddress: utils.Ptr("10.0.0.1"),
@@ -172,7 +178,7 @@ func TestCreateInstanceWaitHandler(t *testing.T) {
instanceGetFails: false,
instanceState: InstanceStateSuccess,
instanceNetwork: postgresflex.InstanceNetwork{
- AccessScope: postgresflex.InstanceNetworkGetAccessScopeAttributeType(utils.Ptr("SNA")),
+ AccessScope: (*postgresflex.InstanceNetworkAccessScope)(utils.Ptr("SNA")),
Acl: nil,
InstanceAddress: nil,
RouterAddress: utils.Ptr("10.0.0.1"),
@@ -185,7 +191,7 @@ func TestCreateInstanceWaitHandler(t *testing.T) {
instanceGetFails: false,
instanceState: InstanceStateProgressing,
instanceNetwork: postgresflex.InstanceNetwork{
- AccessScope: postgresflex.InstanceNetworkGetAccessScopeAttributeType(utils.Ptr("SNA")),
+ AccessScope: (*postgresflex.InstanceNetworkAccessScope)(utils.Ptr("SNA")),
Acl: nil,
InstanceAddress: utils.Ptr("10.0.0.1"),
RouterAddress: utils.Ptr("10.0.0.1"),
@@ -199,6 +205,11 @@ func TestCreateInstanceWaitHandler(t *testing.T) {
tt.desc, func(t *testing.T) {
instanceId := "foo-bar"
+ apiClientMock := postgresflex.DefaultAPIServiceMock{
+ CreateInstanceRequestExecuteMock: nil,
+ GetInstanceRequestExecuteMock: nil,
+ }
+
apiClient := &apiClientInstanceMocked{
instanceId: instanceId,
instanceState: tt.instanceState,
@@ -207,7 +218,7 @@ func TestCreateInstanceWaitHandler(t *testing.T) {
usersGetErrorStatus: tt.usersGetErrorStatus,
}
- handler := CreateInstanceWaitHandler(context.Background(), apiClient, "", "", instanceId)
+ handler := CreateInstanceWaitHandler(context.Background(), apiClientMock, "", "", instanceId)
gotRes, err := handler.SetTimeout(10 * time.Millisecond).SetSleepBeforeWait(1 * time.Millisecond).WaitWithContext(context.Background())
if (err != nil) != tt.wantErr {
diff --git a/stackit/internal/wait/sqlserverflexalpha/wait.go b/stackit/internal/wait/sqlserverflexalpha/wait.go
deleted file mode 100644
index 712347d1..00000000
--- a/stackit/internal/wait/sqlserverflexalpha/wait.go
+++ /dev/null
@@ -1,392 +0,0 @@
-package sqlserverflexalpha
-
-import (
- "context"
- "errors"
- "fmt"
- "net/http"
- "strings"
- "time"
-
- "github.com/hashicorp/terraform-plugin-log/tflog"
- "github.com/stackitcloud/stackit-sdk-go/core/oapierror"
- "github.com/stackitcloud/stackit-sdk-go/core/wait"
-
- sqlserverflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha"
-)
-
-// READY, PENDING, PROGRESSING, FAILURE, UNKNOWN,
-const (
- InstanceStateEmpty = ""
- InstanceStateSuccess = "READY"
- InstanceStatePending = "PENDING"
- InstanceStateProcessing = "PROGRESSING"
- InstanceStateFailed = "FAILURE"
- InstanceStateUnknown = "UNKNOWN"
- InstanceStateTerminating = "TERMINATING"
-)
-
-// APIClientInterface Interface needed for tests
-type APIClientInterface interface {
- GetInstanceRequestExecute(
- ctx context.Context,
- projectId, region, instanceId string,
- ) (*sqlserverflex.GetInstanceResponse, error)
- GetDatabaseRequestExecute(
- ctx context.Context,
- projectId string,
- region string,
- instanceId string,
- databaseName string,
- ) (*sqlserverflex.GetDatabaseResponse, error)
- GetUserRequestExecute(
- ctx context.Context,
- projectId string,
- region string,
- instanceId string,
- userId int64,
- ) (*sqlserverflex.GetUserResponse, error)
-
- ListRolesRequestExecute(
- ctx context.Context,
- projectId string,
- region string,
- instanceId string,
- ) (*sqlserverflex.ListRolesResponse, error)
-
- ListUsersRequest(
- ctx context.Context,
- projectId string,
- region string,
- instanceId string,
- ) sqlserverflex.ApiListUsersRequestRequest
-
- ListUsersRequestExecute(
- ctx context.Context,
- projectId string,
- region string,
- instanceId string,
- ) (*sqlserverflex.ListUserResponse, error)
-}
-
-// APIClientUserInterface Interface needed for tests
-type APIClientUserInterface interface {
- DeleteUserRequestExecute(
- ctx context.Context,
- projectId string,
- region string,
- instanceId string,
- userId int64,
- ) error
-}
-
-// CreateInstanceWaitHandler will wait for instance creation
-func CreateInstanceWaitHandler(
- ctx context.Context,
- a APIClientInterface,
- projectId, instanceId, region string,
-) *wait.AsyncActionHandler[sqlserverflex.GetInstanceResponse] {
- handler := wait.New(
- func() (waitFinished bool, response *sqlserverflex.GetInstanceResponse, err error) {
- s, err := a.GetInstanceRequestExecute(ctx, projectId, region, instanceId)
- if err != nil {
- return false, nil, err
- }
- if s == nil || s.Id == nil || *s.Id != instanceId || s.Status == nil {
- return false, nil, nil
- }
- switch strings.ToLower(string(*s.Status)) {
- case strings.ToLower(InstanceStateSuccess):
- if s.Network != nil && s.Network.AccessScope != nil && *s.Network.AccessScope == "SNA" {
- if s.Network.InstanceAddress == nil {
- tflog.Info(ctx, "Waiting for instance_address")
- return false, nil, nil
- }
- if s.Network.RouterAddress == nil {
- tflog.Info(ctx, "Waiting for router_address")
- return false, nil, nil
- }
- }
-
- tflog.Info(ctx, "trying to get roles")
- time.Sleep(10 * time.Second)
- _, rolesErr := a.ListRolesRequestExecute(ctx, projectId, region, instanceId)
- if rolesErr != nil {
- var oapiErr *oapierror.GenericOpenAPIError
- ok := errors.As(rolesErr, &oapiErr)
- if !ok {
- return false, nil, fmt.Errorf("could not convert error to oapierror.GenericOpenAPIError")
- }
- if oapiErr.StatusCode != http.StatusInternalServerError {
- tflog.Info(
- ctx, "got error from api", map[string]interface{}{
- "error": rolesErr.Error(),
- },
- )
- return false, nil, rolesErr
- }
- tflog.Info(
- ctx, "wait for get-roles to work hack", map[string]interface{}{},
- )
- time.Sleep(10 * time.Second)
- return false, nil, nil
- }
-
- tflog.Info(ctx, "trying to get users")
- time.Sleep(10 * time.Second)
- _, usersErr := a.ListUsersRequestExecute(ctx, projectId, region, instanceId)
- if usersErr != nil {
- var oapiErr *oapierror.GenericOpenAPIError
- ok := errors.As(usersErr, &oapiErr)
- if !ok {
- return false, nil, fmt.Errorf("could not convert error to oapierror.GenericOpenAPIError")
- }
- if oapiErr.StatusCode != http.StatusInternalServerError {
- tflog.Info(
- ctx, "got error from api", map[string]interface{}{
- "error": rolesErr.Error(),
- },
- )
- return false, nil, usersErr
- }
- tflog.Info(
- ctx, "wait for get-users to work hack", map[string]interface{}{},
- )
- time.Sleep(10 * time.Second)
- return false, nil, nil
- }
- return true, s, nil
- case strings.ToLower(InstanceStateUnknown), strings.ToLower(InstanceStateFailed):
- return true, nil, fmt.Errorf("create failed for instance with id %s", instanceId)
- case strings.ToLower(InstanceStatePending), strings.ToLower(InstanceStateProcessing):
- tflog.Info(
- ctx, "request is being handled", map[string]interface{}{
- "status": *s.Status,
- },
- )
- time.Sleep(10 * time.Second)
- return false, nil, nil
- default:
- tflog.Info(
- ctx, "Wait (create) received unknown status", map[string]interface{}{
- "instanceId": instanceId,
- "status": s.Status,
- },
- )
- return true, nil, errors.New("unknown status received")
- }
- },
- )
- return handler
-}
-
-// UpdateInstanceWaitHandler will wait for instance update
-func UpdateInstanceWaitHandler(
- ctx context.Context,
- a APIClientInterface,
- projectId, instanceId, region string,
-) *wait.AsyncActionHandler[sqlserverflex.GetInstanceResponse] {
- handler := wait.New(
- func() (waitFinished bool, response *sqlserverflex.GetInstanceResponse, err error) {
- s, err := a.GetInstanceRequestExecute(ctx, projectId, region, instanceId)
- if err != nil {
- return false, nil, err
- }
- if s == nil || s.Id == nil || *s.Id != instanceId || s.Status == nil {
- return false, nil, nil
- }
- switch strings.ToLower(string(*s.Status)) {
- case strings.ToLower(InstanceStateSuccess):
- return true, s, nil
- case strings.ToLower(InstanceStateUnknown), strings.ToLower(InstanceStateFailed):
- return true, s, fmt.Errorf("update failed for instance with id %s", instanceId)
- case strings.ToLower(InstanceStatePending), strings.ToLower(InstanceStateProcessing):
- tflog.Info(
- ctx, "request is being handled", map[string]interface{}{
- "status": *s.Status,
- },
- )
- return false, s, nil
- default:
- tflog.Info(
- ctx, "Wait (update) received unknown status", map[string]interface{}{
- "instanceId": instanceId,
- "status": s.Status,
- },
- )
- return false, s, nil
- }
- },
- )
- return handler
-}
-
-// DeleteInstanceWaitHandler will wait for instance deletion
-func DeleteInstanceWaitHandler(
- ctx context.Context,
- a APIClientInterface,
- projectId, instanceId, region string,
-) *wait.AsyncActionHandler[sqlserverflex.GetInstanceResponse] {
- handler := wait.New(
- func() (waitFinished bool, response *sqlserverflex.GetInstanceResponse, err error) {
- s, err := a.GetInstanceRequestExecute(ctx, projectId, region, instanceId)
- if err == nil {
- return false, s, nil
- }
- var oapiErr *oapierror.GenericOpenAPIError
- ok := errors.As(err, &oapiErr)
- if !ok {
- return false, nil, fmt.Errorf("could not convert error to oapierror.GenericOpenAPIError")
- }
- if oapiErr.StatusCode != http.StatusNotFound {
- return false, nil, err
- }
- return true, nil, nil
- },
- )
- handler.SetTimeout(30 * time.Minute)
- return handler
-}
-
-// CreateDatabaseWaitHandler will wait for instance creation
-func CreateDatabaseWaitHandler(
- ctx context.Context,
- a APIClientInterface,
- projectId, instanceId, region, databaseName string,
-) *wait.AsyncActionHandler[sqlserverflex.GetDatabaseResponse] {
- handler := wait.New(
- func() (waitFinished bool, response *sqlserverflex.GetDatabaseResponse, err error) {
- s, err := a.GetDatabaseRequestExecute(ctx, projectId, region, instanceId, databaseName)
- if err != nil {
- var oapiErr *oapierror.GenericOpenAPIError
- ok := errors.As(err, &oapiErr)
- if !ok {
- return false, nil, fmt.Errorf(
- "get database - could not convert error to oapierror.GenericOpenAPIError: %s",
- err.Error(),
- )
- }
- if oapiErr.StatusCode != http.StatusNotFound {
- return false, nil, err
- }
- return false, nil, nil
- }
- if s == nil || s.Name == nil || *s.Name != databaseName {
- return false, nil, errors.New("response did return different result")
- }
- return true, s, nil
- },
- )
- return handler
-}
-
-// CreateUserWaitHandler will wait for instance creation
-func CreateUserWaitHandler(
- ctx context.Context,
- a APIClientInterface,
- projectId, instanceId, region string,
- userId int64,
-) *wait.AsyncActionHandler[sqlserverflex.GetUserResponse] {
- handler := wait.New(
- func() (waitFinished bool, response *sqlserverflex.GetUserResponse, err error) {
- s, err := a.GetUserRequestExecute(ctx, projectId, region, instanceId, userId)
- if err != nil {
- var oapiErr *oapierror.GenericOpenAPIError
- ok := errors.As(err, &oapiErr)
- if !ok {
- return false, nil, fmt.Errorf("could not convert error to oapierror.GenericOpenAPIError")
- }
- if oapiErr.StatusCode != http.StatusNotFound {
- return false, nil, err
- }
- return false, nil, nil
- }
- return true, s, nil
- },
- )
- return handler
-}
-
-// WaitForUserWaitHandler will wait for instance creation
-func WaitForUserWaitHandler(
- ctx context.Context,
- a APIClientInterface,
- projectId, instanceId, region, userName string,
-) *wait.AsyncActionHandler[sqlserverflex.ListUserResponse] {
- startTime := time.Now()
- timeOut := 2 * time.Minute
-
- handler := wait.New(
- func() (waitFinished bool, response *sqlserverflex.ListUserResponse, err error) {
- if time.Since(startTime) > timeOut {
- return false, nil, errors.New("ran into timeout")
- }
- s, err := a.ListUsersRequest(ctx, projectId, region, instanceId).Size(100).Execute()
- if err != nil {
- var oapiErr *oapierror.GenericOpenAPIError
- ok := errors.As(err, &oapiErr)
- if !ok {
- return false, nil, fmt.Errorf(
- "wait (list users) could not convert error to oapierror.GenericOpenAPIError: %s",
- err.Error(),
- )
- }
- if oapiErr.StatusCode != http.StatusNotFound {
- return false, nil, err
- }
- tflog.Info(
- ctx, "Wait (list users) still waiting", map[string]interface{}{},
- )
-
- return false, nil, nil
- }
- users, ok := s.GetUsersOk()
- if !ok {
- return false, nil, errors.New("no users found")
- }
-
- for _, u := range users {
- if u.GetUsername() == userName {
- return true, s, nil
- }
- }
- tflog.Info(
- ctx, "Wait (list users) user still not present", map[string]interface{}{},
- )
- return false, nil, nil
- },
- )
- return handler
-}
-
-// DeleteUserWaitHandler will wait for instance deletion
-func DeleteUserWaitHandler(
- ctx context.Context,
- a APIClientInterface,
- projectId, region, instanceId string,
- userId int64,
-) *wait.AsyncActionHandler[struct{}] {
- handler := wait.New(
- func() (waitFinished bool, response *struct{}, err error) {
- _, err = a.GetUserRequestExecute(ctx, projectId, region, instanceId, userId)
- if err == nil {
- return false, nil, nil
- }
- var oapiErr *oapierror.GenericOpenAPIError
- ok := errors.As(err, &oapiErr)
- if !ok {
- return false, nil, fmt.Errorf("could not convert error to oapierror.GenericOpenAPIError")
- }
-
- switch oapiErr.StatusCode {
- case http.StatusNotFound:
- return true, nil, nil
- default:
- return false, nil, err
- }
- },
- )
- handler.SetTimeout(15 * time.Minute)
- handler.SetSleepBeforeWait(15 * time.Second)
- return handler
-}
diff --git a/stackit/internal/wait/sqlserverflexalpha/wait_test.go b/stackit/internal/wait/sqlserverflexalpha/wait_test.go
deleted file mode 100644
index ca84ad1e..00000000
--- a/stackit/internal/wait/sqlserverflexalpha/wait_test.go
+++ /dev/null
@@ -1,344 +0,0 @@
-package sqlserverflexalpha
-
-import (
- "context"
- "reflect"
- "testing"
- "time"
-
- "github.com/google/go-cmp/cmp"
- "github.com/stackitcloud/stackit-sdk-go/core/oapierror"
- "github.com/stackitcloud/stackit-sdk-go/core/utils"
-
- sqlserverflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha"
-)
-
-// Used for testing instance operations
-type apiClientInstanceMocked struct {
- instanceId string
- instanceState string
- instanceNetwork sqlserverflex.InstanceNetwork
- instanceIsDeleted bool
- instanceGetFails bool
-}
-
-type ListUsersRequestRequest struct{}
-
-func (l ListUsersRequestRequest) Page(_ int64) sqlserverflex.ApiListUsersRequestRequest {
- return l
-}
-
-func (l ListUsersRequestRequest) Size(_ int64) sqlserverflex.ApiListUsersRequestRequest {
- return l
-}
-
-func (l ListUsersRequestRequest) Sort(_ sqlserverflex.UserSort) sqlserverflex.ApiListUsersRequestRequest {
- return l
-}
-
-func (l ListUsersRequestRequest) Execute() (*sqlserverflex.ListUserResponse, error) {
- // TODO implement me
- panic("implement me")
-}
-
-func (a *apiClientInstanceMocked) ListUsersRequest(
- _ context.Context,
- _ string,
- _ string,
- _ string,
-) sqlserverflex.ApiListUsersRequestRequest {
- return ListUsersRequestRequest{}
-}
-
-func (a *apiClientInstanceMocked) ListRolesRequestExecute(
- _ context.Context,
- _ string,
- _ string,
- _ string,
-) (*sqlserverflex.ListRolesResponse, error) {
- return &sqlserverflex.ListRolesResponse{
- Roles: &[]string{},
- }, nil
-}
-
-func (a *apiClientInstanceMocked) ListUsersRequestExecute(
- _ context.Context,
- _ string,
- _ string,
- _ string,
-) (*sqlserverflex.ListUserResponse, error) {
- return &sqlserverflex.ListUserResponse{
- Pagination: nil,
- Users: nil,
- }, nil
-}
-
-func (a *apiClientInstanceMocked) GetDatabaseRequestExecute(
- _ context.Context,
- _ string,
- _ string,
- _ string,
- _ string,
-) (*sqlserverflex.GetDatabaseResponse, error) {
- return nil, nil
-}
-
-func (a *apiClientInstanceMocked) GetUserRequestExecute(
- _ context.Context,
- _ string,
- _ string,
- _ string,
- _ int64,
-) (*sqlserverflex.GetUserResponse, error) {
- return nil, nil
-}
-
-func (a *apiClientInstanceMocked) GetInstanceRequestExecute(
- _ context.Context,
- _, _, _ string,
-) (*sqlserverflex.GetInstanceResponse, error) {
- if a.instanceGetFails {
- return nil, &oapierror.GenericOpenAPIError{
- StatusCode: 500,
- }
- }
-
- if a.instanceIsDeleted {
- return nil, &oapierror.GenericOpenAPIError{
- StatusCode: 404,
- }
- }
-
- return &sqlserverflex.GetInstanceResponse{
- Id: &a.instanceId,
- Status: sqlserverflex.GetInstanceResponseGetStatusAttributeType(&a.instanceState),
- Network: &a.instanceNetwork,
- }, nil
-}
-func TestCreateInstanceWaitHandler(t *testing.T) {
- instanceId := utils.Ptr("foo")
- tests := []struct {
- desc string
- instanceId string
- instanceGetFails bool
- instanceState string
- instanceNetwork sqlserverflex.InstanceNetwork
- usersGetErrorStatus int
- wantErr bool
- wantRes *sqlserverflex.GetInstanceResponse
- }{
- //{
- // desc: "create_succeeded",
- // instanceId: *instanceId,
- // instanceGetFails: false,
- // instanceState: *stateSuccess,
- // instanceNetwork: sqlserverflex.InstanceNetwork{
- // AccessScope: nil,
- // Acl: nil,
- // InstanceAddress: utils.Ptr("10.0.0.1"),
- // RouterAddress: utils.Ptr("10.0.0.2"),
- // },
- // wantErr: false,
- // wantRes: &sqlserverflex.GetInstanceResponse{
- // BackupSchedule: nil,
- // Edition: nil,
- // Encryption: nil,
- // FlavorId: nil,
- // Id: instanceId,
- // IsDeletable: nil,
- // Name: nil,
- // Network: &sqlserverflex.InstanceNetwork{
- // AccessScope: nil,
- // Acl: nil,
- // InstanceAddress: utils.Ptr("10.0.0.1"),
- // RouterAddress: utils.Ptr("10.0.0.2"),
- // },
- // Replicas: nil,
- // RetentionDays: nil,
- // Status: sqlserverflex.GetInstanceResponseGetStatusAttributeType(stateSuccess),
- // Storage: nil,
- // Version: nil,
- // },
- // },
- {
- desc: "create_failed",
- instanceId: *instanceId,
- instanceGetFails: false,
- instanceState: InstanceStateFailed,
- wantErr: true,
- wantRes: nil,
- },
- {
- desc: "create_failed_2",
- instanceId: *instanceId,
- instanceGetFails: false,
- instanceState: InstanceStateEmpty,
- wantErr: true,
- wantRes: nil,
- },
- {
- desc: "instance_get_fails",
- instanceId: *instanceId,
- instanceGetFails: true,
- wantErr: true,
- wantRes: nil,
- },
- {
- desc: "timeout",
- instanceId: *instanceId,
- instanceGetFails: false,
- instanceState: InstanceStateProcessing,
- wantErr: true,
- wantRes: nil,
- },
- }
- for _, tt := range tests {
- t.Run(
- tt.desc, func(t *testing.T) {
- apiClient := &apiClientInstanceMocked{
- instanceId: tt.instanceId,
- instanceState: tt.instanceState,
- instanceGetFails: tt.instanceGetFails,
- }
-
- handler := CreateInstanceWaitHandler(context.Background(), apiClient, "", tt.instanceId, "")
-
- gotRes, err := handler.SetTimeout(10 * time.Millisecond).SetSleepBeforeWait(1 * time.Millisecond).WaitWithContext(context.Background())
- if (err != nil) != tt.wantErr {
- t.Fatalf("handler error = %v, wantErr %v", err, tt.wantErr)
- }
-
- if !reflect.DeepEqual(gotRes, tt.wantRes) {
- t.Fatalf("handler gotRes = %v, want %v", gotRes, tt.wantRes)
- }
- },
- )
- }
-}
-
-func TestUpdateInstanceWaitHandler(t *testing.T) {
- t.Skip("skipping - needs refactoring")
- tests := []struct {
- desc string
- instanceGetFails bool
- instanceState string
- wantErr bool
- wantResp bool
- }{
- {
- desc: "update_succeeded",
- instanceGetFails: false,
- instanceState: InstanceStateSuccess,
- wantErr: false,
- wantResp: true,
- },
- {
- desc: "update_failed",
- instanceGetFails: false,
- instanceState: InstanceStateFailed,
- wantErr: true,
- wantResp: true,
- },
- {
- desc: "update_failed_2",
- instanceGetFails: false,
- instanceState: InstanceStateEmpty,
- wantErr: true,
- wantResp: true,
- },
- {
- desc: "get_fails",
- instanceGetFails: true,
- wantErr: true,
- wantResp: false,
- },
- {
- desc: "timeout",
- instanceGetFails: false,
- instanceState: InstanceStateProcessing,
- wantErr: true,
- wantResp: true,
- },
- }
- for _, tt := range tests {
- t.Run(
- tt.desc, func(t *testing.T) {
- instanceId := "foo-bar"
-
- apiClient := &apiClientInstanceMocked{
- instanceId: instanceId,
- instanceState: tt.instanceState,
- instanceGetFails: tt.instanceGetFails,
- }
-
- var wantRes *sqlserverflex.GetInstanceResponse
- if tt.wantResp {
- wantRes = &sqlserverflex.GetInstanceResponse{
- Id: &instanceId,
- Status: sqlserverflex.GetInstanceResponseGetStatusAttributeType(utils.Ptr(tt.instanceState)),
- }
- }
-
- handler := UpdateInstanceWaitHandler(context.Background(), apiClient, "", instanceId, "")
-
- gotRes, err := handler.SetTimeout(10 * time.Millisecond).SetSleepBeforeWait(1 * time.Millisecond).WaitWithContext(context.Background())
-
- if (err != nil) != tt.wantErr {
- t.Fatalf("handler error = %v, wantErr %v", err, tt.wantErr)
- }
- if !cmp.Equal(gotRes, wantRes) {
- t.Fatalf("handler gotRes = %v, want %v", gotRes, wantRes)
- }
- },
- )
- }
-}
-
-func TestDeleteInstanceWaitHandler(t *testing.T) {
- tests := []struct {
- desc string
- instanceGetFails bool
- instanceState string
- wantErr bool
- }{
- {
- desc: "delete_succeeded",
- instanceGetFails: false,
- instanceState: InstanceStateSuccess,
- wantErr: false,
- },
- {
- desc: "delete_failed",
- instanceGetFails: false,
- instanceState: InstanceStateFailed,
- wantErr: true,
- },
- {
- desc: "get_fails",
- instanceGetFails: true,
- wantErr: true,
- },
- }
- for _, tt := range tests {
- t.Run(
- tt.desc, func(t *testing.T) {
- instanceId := "foo-bar"
-
- apiClient := &apiClientInstanceMocked{
- instanceGetFails: tt.instanceGetFails,
- instanceIsDeleted: tt.instanceState == InstanceStateSuccess,
- instanceId: instanceId,
- instanceState: tt.instanceState,
- }
-
- handler := DeleteInstanceWaitHandler(context.Background(), apiClient, "", instanceId, "")
-
- _, err := handler.SetTimeout(10 * time.Millisecond).WaitWithContext(context.Background())
-
- if (err != nil) != tt.wantErr {
- t.Fatalf("handler error = %v, wantErr %v", err, tt.wantErr)
- }
- },
- )
- }
-}
diff --git a/stackit/internal/wait/sqlserverflexbeta/wait.go b/stackit/internal/wait/sqlserverflexbeta/wait.go
deleted file mode 100644
index 2660cac5..00000000
--- a/stackit/internal/wait/sqlserverflexbeta/wait.go
+++ /dev/null
@@ -1,412 +0,0 @@
-package sqlserverflexbeta
-
-import (
- "context"
- "errors"
- "fmt"
- "net/http"
- "strings"
- "time"
-
- "github.com/hashicorp/terraform-plugin-log/tflog"
- "github.com/stackitcloud/stackit-sdk-go/core/oapierror"
- "github.com/stackitcloud/stackit-sdk-go/core/wait"
-
- sqlserverflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta"
-)
-
-// READY, PENDING, PROGRESSING, FAILURE, UNKNOWN,
-const (
- InstanceStateEmpty = ""
- InstanceStateSuccess = "READY"
- InstanceStatePending = "PENDING"
- InstanceStateProcessing = "PROGRESSING"
- InstanceStateFailed = "FAILURE"
- InstanceStateUnknown = "UNKNOWN"
- InstanceStateTerminating = "TERMINATING"
-)
-
-// APIClientInterface Interface needed for tests
-type APIClientInterface interface {
- GetInstanceRequestExecute(
- ctx context.Context,
- projectId, region, instanceId string,
- ) (*sqlserverflex.GetInstanceResponse, error)
- GetDatabaseRequestExecute(
- ctx context.Context,
- projectId string,
- region string,
- instanceId string,
- databaseName string,
- ) (*sqlserverflex.GetDatabaseResponse, error)
- GetUserRequestExecute(
- ctx context.Context,
- projectId string,
- region string,
- instanceId string,
- userId int64,
- ) (*sqlserverflex.GetUserResponse, error)
-
- ListRolesRequestExecute(
- ctx context.Context,
- projectId string,
- region string,
- instanceId string,
- ) (*sqlserverflex.ListRolesResponse, error)
-
- ListUsersRequest(
- ctx context.Context,
- projectId string,
- region string,
- instanceId string,
- ) sqlserverflex.ApiListUsersRequestRequest
-
- ListUsersRequestExecute(
- ctx context.Context,
- projectId string,
- region string,
- instanceId string,
- ) (*sqlserverflex.ListUserResponse, error)
-}
-
-// APIClientUserInterface Interface needed for tests
-type APIClientUserInterface interface {
- DeleteUserRequestExecute(
- ctx context.Context,
- projectId string,
- region string,
- instanceId string,
- userId int64,
- ) error
-}
-
-// CreateInstanceWaitHandler will wait for instance creation
-func CreateInstanceWaitHandler(
- ctx context.Context,
- a APIClientInterface,
- projectId, instanceId, region string,
-) *wait.AsyncActionHandler[sqlserverflex.GetInstanceResponse] {
- handler := wait.New(
- func() (waitFinished bool, response *sqlserverflex.GetInstanceResponse, err error) {
- s, err := a.GetInstanceRequestExecute(ctx, projectId, region, instanceId)
- if err != nil {
- var oapiErr *oapierror.GenericOpenAPIError
- ok := errors.As(err, &oapiErr)
- if !ok {
- return false, nil, fmt.Errorf("could not convert error to oapierror.GenericOpenAPIError: %w", err)
- }
- switch oapiErr.StatusCode {
- case http.StatusNotFound:
- return false, nil, nil
- default:
- return false, nil, fmt.Errorf("api error: %w", err)
- }
- }
- if s == nil || s.Id == nil || *s.Id != instanceId || s.Status == nil {
- return false, nil, nil
- }
- switch strings.ToLower(string(*s.Status)) {
- case strings.ToLower(InstanceStateSuccess):
- if s.Network != nil && s.Network.AccessScope != nil && *s.Network.AccessScope == "SNA" {
- if s.Network.InstanceAddress == nil {
- tflog.Info(ctx, "Waiting for instance_address")
- return false, nil, nil
- }
- if s.Network.RouterAddress == nil {
- tflog.Info(ctx, "Waiting for router_address")
- return false, nil, nil
- }
- }
-
- tflog.Info(ctx, "trying to get roles")
- time.Sleep(10 * time.Second)
- _, rolesErr := a.ListRolesRequestExecute(ctx, projectId, region, instanceId)
- if rolesErr != nil {
- var oapiErr *oapierror.GenericOpenAPIError
- ok := errors.As(rolesErr, &oapiErr)
- if !ok {
- return false, nil, fmt.Errorf("could not convert error to oapierror.GenericOpenAPIError")
- }
- if oapiErr.StatusCode != http.StatusInternalServerError {
- tflog.Info(
- ctx, "got error from api", map[string]interface{}{
- "error": rolesErr.Error(),
- },
- )
- return false, nil, rolesErr
- }
- tflog.Info(
- ctx, "wait for get-roles to work hack", map[string]interface{}{},
- )
- time.Sleep(10 * time.Second)
- return false, nil, nil
- }
-
- tflog.Info(ctx, "trying to get users")
- time.Sleep(10 * time.Second)
- _, usersErr := a.ListUsersRequestExecute(ctx, projectId, region, instanceId)
- if usersErr != nil {
- var oapiErr *oapierror.GenericOpenAPIError
- ok := errors.As(usersErr, &oapiErr)
- if !ok {
- return false, nil, fmt.Errorf("could not convert error to oapierror.GenericOpenAPIError")
- }
- if oapiErr.StatusCode != http.StatusInternalServerError {
- tflog.Info(
- ctx, "got error from api", map[string]interface{}{
- "error": rolesErr.Error(),
- },
- )
- return false, nil, usersErr
- }
- tflog.Info(
- ctx, "wait for get-users to work hack", map[string]interface{}{},
- )
- time.Sleep(10 * time.Second)
- return false, nil, nil
- }
- return true, s, nil
- case strings.ToLower(InstanceStateUnknown):
- return true, nil, fmt.Errorf(
- "create failed for instance %s with status %s",
- instanceId,
- InstanceStateUnknown,
- )
- case strings.ToLower(InstanceStateFailed):
- return true, nil, fmt.Errorf(
- "create failed for instance %s with status %s",
- instanceId,
- InstanceStateFailed,
- )
- case strings.ToLower(InstanceStatePending), strings.ToLower(InstanceStateProcessing):
- tflog.Info(
- ctx, "request is being handled", map[string]interface{}{
- "status": *s.Status,
- },
- )
- time.Sleep(10 * time.Second)
- return false, nil, nil
- default:
- tflog.Info(
- ctx, "Wait (create) received unknown status", map[string]interface{}{
- "instanceId": instanceId,
- "status": s.Status,
- },
- )
- return true, nil, errors.New("unknown status received")
- }
- },
- )
- return handler
-}
-
-// UpdateInstanceWaitHandler will wait for instance update
-func UpdateInstanceWaitHandler(
- ctx context.Context,
- a APIClientInterface,
- projectId, instanceId, region string,
-) *wait.AsyncActionHandler[sqlserverflex.GetInstanceResponse] {
- handler := wait.New(
- func() (waitFinished bool, response *sqlserverflex.GetInstanceResponse, err error) {
- s, err := a.GetInstanceRequestExecute(ctx, projectId, region, instanceId)
- if err != nil {
- return false, nil, err
- }
- if s == nil || s.Id == nil || *s.Id != instanceId || s.Status == nil {
- return false, nil, nil
- }
- switch strings.ToLower(string(*s.Status)) {
- case strings.ToLower(InstanceStateSuccess):
- return true, s, nil
- case strings.ToLower(InstanceStateUnknown), strings.ToLower(InstanceStateFailed):
- return true, s, fmt.Errorf("update failed for instance with id %s", instanceId)
- case strings.ToLower(InstanceStatePending), strings.ToLower(InstanceStateProcessing):
- tflog.Info(
- ctx, "request is being handled", map[string]interface{}{
- "status": *s.Status,
- },
- )
- return false, s, nil
- default:
- tflog.Info(
- ctx, "Wait (update) received unknown status", map[string]interface{}{
- "instanceId": instanceId,
- "status": s.Status,
- },
- )
- return false, s, nil
- }
- },
- )
- return handler
-}
-
-// DeleteInstanceWaitHandler will wait for instance deletion
-func DeleteInstanceWaitHandler(
- ctx context.Context,
- a APIClientInterface,
- projectId, instanceId, region string,
-) *wait.AsyncActionHandler[sqlserverflex.GetInstanceResponse] {
- handler := wait.New(
- func() (waitFinished bool, response *sqlserverflex.GetInstanceResponse, err error) {
- s, err := a.GetInstanceRequestExecute(ctx, projectId, region, instanceId)
- if err == nil {
- return false, s, nil
- }
- var oapiErr *oapierror.GenericOpenAPIError
- ok := errors.As(err, &oapiErr)
- if !ok {
- return false, nil, fmt.Errorf("could not convert error to oapierror.GenericOpenAPIError")
- }
- if oapiErr.StatusCode != http.StatusNotFound {
- return false, nil, err
- }
- return true, nil, nil
- },
- )
- handler.SetTimeout(30 * time.Minute)
- return handler
-}
-
-// CreateDatabaseWaitHandler will wait for instance creation
-func CreateDatabaseWaitHandler(
- ctx context.Context,
- a APIClientInterface,
- projectId, instanceId, region, databaseName string,
-) *wait.AsyncActionHandler[sqlserverflex.GetDatabaseResponse] {
- handler := wait.New(
- func() (waitFinished bool, response *sqlserverflex.GetDatabaseResponse, err error) {
- s, err := a.GetDatabaseRequestExecute(ctx, projectId, region, instanceId, databaseName)
- if err != nil {
- var oapiErr *oapierror.GenericOpenAPIError
- ok := errors.As(err, &oapiErr)
- if !ok {
- return false, nil, fmt.Errorf(
- "get database - could not convert error to oapierror.GenericOpenAPIError: %s",
- err.Error(),
- )
- }
- if oapiErr.StatusCode != http.StatusNotFound {
- return false, nil, err
- }
- return false, nil, nil
- }
- if s == nil || s.Name == nil || *s.Name != databaseName {
- return false, nil, errors.New("response did return different result")
- }
- return true, s, nil
- },
- )
- return handler
-}
-
-// CreateUserWaitHandler will wait for instance creation
-func CreateUserWaitHandler(
- ctx context.Context,
- a APIClientInterface,
- projectId, instanceId, region string,
- userId int64,
-) *wait.AsyncActionHandler[sqlserverflex.GetUserResponse] {
- handler := wait.New(
- func() (waitFinished bool, response *sqlserverflex.GetUserResponse, err error) {
- s, err := a.GetUserRequestExecute(ctx, projectId, region, instanceId, userId)
- if err != nil {
- var oapiErr *oapierror.GenericOpenAPIError
- ok := errors.As(err, &oapiErr)
- if !ok {
- return false, nil, fmt.Errorf("could not convert error to oapierror.GenericOpenAPIError")
- }
- if oapiErr.StatusCode != http.StatusNotFound {
- return false, nil, err
- }
- return false, nil, nil
- }
- return true, s, nil
- },
- )
- return handler
-}
-
-// WaitForUserWaitHandler will wait for instance creation
-func WaitForUserWaitHandler(
- ctx context.Context,
- a APIClientInterface,
- projectId, instanceId, region, userName string,
-) *wait.AsyncActionHandler[sqlserverflex.ListUserResponse] {
- startTime := time.Now()
- timeOut := 2 * time.Minute
-
- handler := wait.New(
- func() (waitFinished bool, response *sqlserverflex.ListUserResponse, err error) {
- if time.Since(startTime) > timeOut {
- return false, nil, errors.New("ran into timeout")
- }
- s, err := a.ListUsersRequest(ctx, projectId, region, instanceId).Size(100).Execute()
- if err != nil {
- var oapiErr *oapierror.GenericOpenAPIError
- ok := errors.As(err, &oapiErr)
- if !ok {
- return false, nil, fmt.Errorf(
- "wait (list users) could not convert error to oapierror.GenericOpenAPIError: %s",
- err.Error(),
- )
- }
- if oapiErr.StatusCode != http.StatusNotFound {
- return false, nil, err
- }
- tflog.Info(
- ctx, "Wait (list users) still waiting", map[string]interface{}{},
- )
-
- return false, nil, nil
- }
- users, ok := s.GetUsersOk()
- if !ok {
- return false, nil, errors.New("no users found")
- }
-
- for _, u := range users {
- if u.GetUsername() == userName {
- return true, s, nil
- }
- }
- tflog.Info(
- ctx, "Wait (list users) user still not present", map[string]interface{}{},
- )
- return false, nil, nil
- },
- )
- return handler
-}
-
-// DeleteUserWaitHandler will wait for instance deletion
-func DeleteUserWaitHandler(
- ctx context.Context,
- a APIClientInterface,
- projectId, region, instanceId string,
- userId int64,
-) *wait.AsyncActionHandler[struct{}] {
- handler := wait.New(
- func() (waitFinished bool, response *struct{}, err error) {
- _, err = a.GetUserRequestExecute(ctx, projectId, region, instanceId, userId)
- if err == nil {
- return false, nil, nil
- }
- var oapiErr *oapierror.GenericOpenAPIError
- ok := errors.As(err, &oapiErr)
- if !ok {
- return false, nil, fmt.Errorf("could not convert error to oapierror.GenericOpenAPIError")
- }
-
- switch oapiErr.StatusCode {
- case http.StatusNotFound:
- return true, nil, nil
- default:
- return false, nil, err
- }
- },
- )
- handler.SetTimeout(15 * time.Minute)
- handler.SetSleepBeforeWait(15 * time.Second)
- return handler
-}
diff --git a/stackit/internal/wait/sqlserverflexbeta/wait_test.go b/stackit/internal/wait/sqlserverflexbeta/wait_test.go
deleted file mode 100644
index 0d10abae..00000000
--- a/stackit/internal/wait/sqlserverflexbeta/wait_test.go
+++ /dev/null
@@ -1,344 +0,0 @@
-package sqlserverflexbeta
-
-import (
- "context"
- "reflect"
- "testing"
- "time"
-
- "github.com/google/go-cmp/cmp"
- "github.com/stackitcloud/stackit-sdk-go/core/oapierror"
- "github.com/stackitcloud/stackit-sdk-go/core/utils"
-
- sqlserverflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta"
-)
-
-// Used for testing instance operations
-type apiClientInstanceMocked struct {
- instanceId string
- instanceState string
- instanceNetwork sqlserverflex.InstanceNetwork
- instanceIsDeleted bool
- instanceGetFails bool
-}
-
-type ListUsersRequestRequest struct{}
-
-func (l ListUsersRequestRequest) Page(_ int64) sqlserverflex.ApiListUsersRequestRequest {
- return l
-}
-
-func (l ListUsersRequestRequest) Size(_ int64) sqlserverflex.ApiListUsersRequestRequest {
- return l
-}
-
-func (l ListUsersRequestRequest) Sort(_ sqlserverflex.UserSort) sqlserverflex.ApiListUsersRequestRequest {
- return l
-}
-
-func (l ListUsersRequestRequest) Execute() (*sqlserverflex.ListUserResponse, error) {
- // TODO implement me
- panic("implement me")
-}
-
-func (a *apiClientInstanceMocked) ListUsersRequest(
- _ context.Context,
- _ string,
- _ string,
- _ string,
-) sqlserverflex.ApiListUsersRequestRequest {
- return ListUsersRequestRequest{}
-}
-
-func (a *apiClientInstanceMocked) ListRolesRequestExecute(
- _ context.Context,
- _ string,
- _ string,
- _ string,
-) (*sqlserverflex.ListRolesResponse, error) {
- return &sqlserverflex.ListRolesResponse{
- Roles: &[]string{},
- }, nil
-}
-
-func (a *apiClientInstanceMocked) ListUsersRequestExecute(
- _ context.Context,
- _ string,
- _ string,
- _ string,
-) (*sqlserverflex.ListUserResponse, error) {
- return &sqlserverflex.ListUserResponse{
- Pagination: nil,
- Users: nil,
- }, nil
-}
-
-func (a *apiClientInstanceMocked) GetDatabaseRequestExecute(
- _ context.Context,
- _ string,
- _ string,
- _ string,
- _ string,
-) (*sqlserverflex.GetDatabaseResponse, error) {
- return nil, nil
-}
-
-func (a *apiClientInstanceMocked) GetUserRequestExecute(
- _ context.Context,
- _ string,
- _ string,
- _ string,
- _ int64,
-) (*sqlserverflex.GetUserResponse, error) {
- return nil, nil
-}
-
-func (a *apiClientInstanceMocked) GetInstanceRequestExecute(
- _ context.Context,
- _, _, _ string,
-) (*sqlserverflex.GetInstanceResponse, error) {
- if a.instanceGetFails {
- return nil, &oapierror.GenericOpenAPIError{
- StatusCode: 500,
- }
- }
-
- if a.instanceIsDeleted {
- return nil, &oapierror.GenericOpenAPIError{
- StatusCode: 404,
- }
- }
-
- return &sqlserverflex.GetInstanceResponse{
- Id: &a.instanceId,
- Status: sqlserverflex.GetInstanceResponseGetStatusAttributeType(&a.instanceState),
- Network: &a.instanceNetwork,
- }, nil
-}
-func TestCreateInstanceWaitHandler(t *testing.T) {
- instanceId := utils.Ptr("foo")
- tests := []struct {
- desc string
- instanceId string
- instanceGetFails bool
- instanceState string
- instanceNetwork sqlserverflex.InstanceNetwork
- usersGetErrorStatus int
- wantErr bool
- wantRes *sqlserverflex.GetInstanceResponse
- }{
- //{
- // desc: "create_succeeded",
- // instanceId: *instanceId,
- // instanceGetFails: false,
- // instanceState: *stateSuccess,
- // instanceNetwork: sqlserverflex.InstanceNetwork{
- // AccessScope: nil,
- // Acl: nil,
- // InstanceAddress: utils.Ptr("10.0.0.1"),
- // RouterAddress: utils.Ptr("10.0.0.2"),
- // },
- // wantErr: false,
- // wantRes: &sqlserverflex.GetInstanceResponse{
- // BackupSchedule: nil,
- // Edition: nil,
- // Encryption: nil,
- // FlavorId: nil,
- // Id: instanceId,
- // IsDeletable: nil,
- // Name: nil,
- // Network: &sqlserverflex.InstanceNetwork{
- // AccessScope: nil,
- // Acl: nil,
- // InstanceAddress: utils.Ptr("10.0.0.1"),
- // RouterAddress: utils.Ptr("10.0.0.2"),
- // },
- // Replicas: nil,
- // RetentionDays: nil,
- // Status: sqlserverflex.GetInstanceResponseGetStatusAttributeType(stateSuccess),
- // Storage: nil,
- // Version: nil,
- // },
- // },
- {
- desc: "create_failed",
- instanceId: *instanceId,
- instanceGetFails: false,
- instanceState: InstanceStateFailed,
- wantErr: true,
- wantRes: nil,
- },
- {
- desc: "create_failed_2",
- instanceId: *instanceId,
- instanceGetFails: false,
- instanceState: InstanceStateEmpty,
- wantErr: true,
- wantRes: nil,
- },
- {
- desc: "instance_get_fails",
- instanceId: *instanceId,
- instanceGetFails: true,
- wantErr: true,
- wantRes: nil,
- },
- {
- desc: "timeout",
- instanceId: *instanceId,
- instanceGetFails: false,
- instanceState: InstanceStateProcessing,
- wantErr: true,
- wantRes: nil,
- },
- }
- for _, tt := range tests {
- t.Run(
- tt.desc, func(t *testing.T) {
- apiClient := &apiClientInstanceMocked{
- instanceId: tt.instanceId,
- instanceState: tt.instanceState,
- instanceGetFails: tt.instanceGetFails,
- }
-
- handler := CreateInstanceWaitHandler(context.Background(), apiClient, "", tt.instanceId, "")
-
- gotRes, err := handler.SetTimeout(10 * time.Millisecond).SetSleepBeforeWait(1 * time.Millisecond).WaitWithContext(context.Background())
- if (err != nil) != tt.wantErr {
- t.Fatalf("handler error = %v, wantErr %v", err, tt.wantErr)
- }
-
- if !reflect.DeepEqual(gotRes, tt.wantRes) {
- t.Fatalf("handler gotRes = %v, want %v", gotRes, tt.wantRes)
- }
- },
- )
- }
-}
-
-func TestUpdateInstanceWaitHandler(t *testing.T) {
- t.Skip("skipping - needs refactoring")
- tests := []struct {
- desc string
- instanceGetFails bool
- instanceState string
- wantErr bool
- wantResp bool
- }{
- {
- desc: "update_succeeded",
- instanceGetFails: false,
- instanceState: InstanceStateSuccess,
- wantErr: false,
- wantResp: true,
- },
- {
- desc: "update_failed",
- instanceGetFails: false,
- instanceState: InstanceStateFailed,
- wantErr: true,
- wantResp: true,
- },
- {
- desc: "update_failed_2",
- instanceGetFails: false,
- instanceState: InstanceStateEmpty,
- wantErr: true,
- wantResp: true,
- },
- {
- desc: "get_fails",
- instanceGetFails: true,
- wantErr: true,
- wantResp: false,
- },
- {
- desc: "timeout",
- instanceGetFails: false,
- instanceState: InstanceStateProcessing,
- wantErr: true,
- wantResp: true,
- },
- }
- for _, tt := range tests {
- t.Run(
- tt.desc, func(t *testing.T) {
- instanceId := "foo-bar"
-
- apiClient := &apiClientInstanceMocked{
- instanceId: instanceId,
- instanceState: tt.instanceState,
- instanceGetFails: tt.instanceGetFails,
- }
-
- var wantRes *sqlserverflex.GetInstanceResponse
- if tt.wantResp {
- wantRes = &sqlserverflex.GetInstanceResponse{
- Id: &instanceId,
- Status: sqlserverflex.GetInstanceResponseGetStatusAttributeType(utils.Ptr(tt.instanceState)),
- }
- }
-
- handler := UpdateInstanceWaitHandler(context.Background(), apiClient, "", instanceId, "")
-
- gotRes, err := handler.SetTimeout(10 * time.Millisecond).SetSleepBeforeWait(1 * time.Millisecond).WaitWithContext(context.Background())
-
- if (err != nil) != tt.wantErr {
- t.Fatalf("handler error = %v, wantErr %v", err, tt.wantErr)
- }
- if !cmp.Equal(gotRes, wantRes) {
- t.Fatalf("handler gotRes = %v, want %v", gotRes, wantRes)
- }
- },
- )
- }
-}
-
-func TestDeleteInstanceWaitHandler(t *testing.T) {
- tests := []struct {
- desc string
- instanceGetFails bool
- instanceState string
- wantErr bool
- }{
- {
- desc: "delete_succeeded",
- instanceGetFails: false,
- instanceState: InstanceStateSuccess,
- wantErr: false,
- },
- {
- desc: "delete_failed",
- instanceGetFails: false,
- instanceState: InstanceStateFailed,
- wantErr: true,
- },
- {
- desc: "get_fails",
- instanceGetFails: true,
- wantErr: true,
- },
- }
- for _, tt := range tests {
- t.Run(
- tt.desc, func(t *testing.T) {
- instanceId := "foo-bar"
-
- apiClient := &apiClientInstanceMocked{
- instanceGetFails: tt.instanceGetFails,
- instanceIsDeleted: tt.instanceState == InstanceStateSuccess,
- instanceId: instanceId,
- instanceState: tt.instanceState,
- }
-
- handler := DeleteInstanceWaitHandler(context.Background(), apiClient, "", instanceId, "")
-
- _, err := handler.SetTimeout(10 * time.Millisecond).WaitWithContext(context.Background())
-
- if (err != nil) != tt.wantErr {
- t.Fatalf("handler error = %v, wantErr %v", err, tt.wantErr)
- }
- },
- )
- }
-}
diff --git a/stackit/provider.go b/stackit/provider.go
index 086ae003..e6e201a8 100644
--- a/stackit/provider.go
+++ b/stackit/provider.go
@@ -29,16 +29,14 @@ import (
postgresflexalphaFlavors "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/flavors"
postgresFlexAlphaInstance "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/instance"
postgresFlexAlphaUser "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/user"
-
- sqlserverflexalphaDatabase "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/database"
- sqlserverFlexAlphaFlavor "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/flavor"
- sqlServerFlexAlphaInstance "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/instance"
- sqlserverFlexAlphaUser "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/user"
-
- sqlserverflexBetaDatabase "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/database"
- sqlserverFlexBetaFlavor "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/flavor"
- sqlserverflexBetaInstance "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/instance"
- sqlserverFlexBetaUser "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/user"
+ //sqlserverflexalphaDatabase "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/database"
+ //sqlserverFlexAlphaFlavor "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/flavor"
+ //sqlServerFlexAlphaInstance "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/instance"
+ //sqlserverFlexAlphaUser "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/user"
+ //sqlserverflexBetaDatabase "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/database"
+ //sqlserverFlexBetaFlavor "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/flavor"
+ //sqlserverflexBetaInstance "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/instance"
+ //sqlserverFlexBetaUser "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/user"
)
// Ensure the implementation satisfies the expected interfaces
@@ -533,15 +531,15 @@ func (p *Provider) DataSources(_ context.Context) []func() datasource.DataSource
postgresFlexAlphaUser.NewUserDataSource,
postgresflexalphaFlavors.NewFlavorsDataSource,
- sqlserverFlexAlphaFlavor.NewFlavorDataSource,
- sqlServerFlexAlphaInstance.NewInstanceDataSource,
- sqlserverFlexAlphaUser.NewUserDataSource,
- sqlserverflexalphaDatabase.NewDatabaseDataSource,
+ //sqlserverFlexAlphaFlavor.NewFlavorDataSource,
+ //sqlServerFlexAlphaInstance.NewInstanceDataSource,
+ //sqlserverFlexAlphaUser.NewUserDataSource,
+ //sqlserverflexalphaDatabase.NewDatabaseDataSource,
- sqlserverflexBetaDatabase.NewDatabaseDataSource,
- sqlserverflexBetaInstance.NewInstanceDataSource,
- sqlserverFlexBetaUser.NewUserDataSource,
- sqlserverFlexBetaFlavor.NewFlavorDataSource,
+ //sqlserverflexBetaDatabase.NewDatabaseDataSource,
+ //sqlserverflexBetaInstance.NewInstanceDataSource,
+ //sqlserverFlexBetaUser.NewUserDataSource,
+ //sqlserverFlexBetaFlavor.NewFlavorDataSource,
}
}
@@ -552,13 +550,13 @@ func (p *Provider) Resources(_ context.Context) []func() resource.Resource {
postgresFlexAlphaUser.NewUserResource,
postgresFlexAlphaDatabase.NewDatabaseResource,
- sqlServerFlexAlphaInstance.NewInstanceResource,
- sqlserverFlexAlphaUser.NewUserResource,
- sqlserverflexalphaDatabase.NewDatabaseResource,
+ //sqlServerFlexAlphaInstance.NewInstanceResource,
+ //sqlserverFlexAlphaUser.NewUserResource,
+ //sqlserverflexalphaDatabase.NewDatabaseResource,
- sqlserverflexBetaInstance.NewInstanceResource,
- sqlserverFlexBetaUser.NewUserResource,
- sqlserverflexBetaDatabase.NewDatabaseResource,
+ //sqlserverflexBetaInstance.NewInstanceResource,
+ //sqlserverFlexBetaUser.NewUserResource,
+ //sqlserverflexBetaDatabase.NewDatabaseResource,
}
return resources
}
diff --git a/stackit/provider_acc_test.go b/stackit/provider_acc_test.go
index b9424b3e..ded698d7 100644
--- a/stackit/provider_acc_test.go
+++ b/stackit/provider_acc_test.go
@@ -18,7 +18,8 @@ import (
"github.com/stackitcloud/stackit-sdk-go/core/clients"
"github.com/stackitcloud/stackit-sdk-go/core/utils"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
+ postgresflexalpha "github.com/stackitcloud/stackit-sdk-go/services/postgresflex"
+
postgresFlexAlphaFlavor "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/flavor"
"github.com/hashicorp/terraform-plugin-framework/datasource"
@@ -29,14 +30,14 @@ import (
postgresflexalphaFlavors "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/flavors"
postgresFlexAlphaInstance "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/instance"
postgresFlexAlphaUser "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/user"
- sqlserverflexalphaDatabase "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/database"
- sqlserverFlexAlphaFlavor "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/flavor"
- sqlserverFlexAlphaInstance "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/instance"
- sqlserverFlexAlphaUser "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/user"
- sqlserverflexBetaDatabase "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/database"
- sqlserverFlexBetaFlavor "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/flavor"
- sqlserverFlexBetaInstance "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/instance"
- sqlserverFlexBetaUser "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/user"
+ //sqlserverflexalphaDatabase "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/database"
+ //sqlserverFlexAlphaFlavor "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/flavor"
+ //sqlserverFlexAlphaInstance "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/instance"
+ //sqlserverFlexAlphaUser "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/user"
+ //sqlserverflexBetaDatabase "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/database"
+ //sqlserverFlexBetaFlavor "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/flavor"
+ //sqlserverFlexBetaInstance "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/instance"
+ //sqlserverFlexBetaUser "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/user"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/internal/testutils"
@@ -175,15 +176,15 @@ func TestUnitProviderHasChildDataSources_Basic(t *testing.T) {
postgresFlexAlphaUser.NewUserDataSource(),
postgresflexalphaFlavors.NewFlavorsDataSource(),
- sqlserverFlexAlphaFlavor.NewFlavorDataSource(),
- sqlserverFlexAlphaInstance.NewInstanceDataSource(),
- sqlserverFlexAlphaUser.NewUserDataSource(),
- sqlserverflexalphaDatabase.NewDatabaseDataSource(),
+ //sqlserverFlexAlphaFlavor.NewFlavorDataSource(),
+ //sqlserverFlexAlphaInstance.NewInstanceDataSource(),
+ //sqlserverFlexAlphaUser.NewUserDataSource(),
+ //sqlserverflexalphaDatabase.NewDatabaseDataSource(),
- sqlserverflexBetaDatabase.NewDatabaseDataSource(),
- sqlserverFlexBetaInstance.NewInstanceDataSource(),
- sqlserverFlexBetaUser.NewUserDataSource(),
- sqlserverFlexBetaFlavor.NewFlavorDataSource(),
+ //sqlserverflexBetaDatabase.NewDatabaseDataSource(),
+ //sqlserverFlexBetaInstance.NewInstanceDataSource(),
+ //sqlserverFlexBetaUser.NewUserDataSource(),
+ //sqlserverFlexBetaFlavor.NewFlavorDataSource(),
}
provider, ok := stackit.New("testing")().(*stackit.Provider)
if !ok {
@@ -212,13 +213,13 @@ func TestUnitProviderHasChildResources_Basic(t *testing.T) {
postgresFlexAlphaUser.NewUserResource(),
postgresFlexAlphaDatabase.NewDatabaseResource(),
- sqlserverFlexAlphaInstance.NewInstanceResource(),
- sqlserverFlexAlphaUser.NewUserResource(),
- sqlserverflexalphaDatabase.NewDatabaseResource(),
+ //sqlserverFlexAlphaInstance.NewInstanceResource(),
+ //sqlserverFlexAlphaUser.NewUserResource(),
+ //sqlserverflexalphaDatabase.NewDatabaseResource(),
- sqlserverFlexBetaInstance.NewInstanceResource(),
- sqlserverFlexBetaUser.NewUserResource(),
- sqlserverflexBetaDatabase.NewDatabaseResource(),
+ //sqlserverFlexBetaInstance.NewInstanceResource(),
+ //sqlserverFlexBetaUser.NewUserResource(),
+ //sqlserverflexBetaDatabase.NewDatabaseResource(),
}
provider, ok := stackit.New("testing")().(*stackit.Provider)
if !ok {