chore: work save
Some checks failed
CI Workflow / Check GoReleaser config (pull_request) Successful in 4s
CI Workflow / Test readiness for publishing provider (pull_request) Failing after 3m57s
CI Workflow / CI run tests (pull_request) Failing after 5m5s
CI Workflow / CI run build and linting (pull_request) Failing after 4m50s
CI Workflow / Code coverage report (pull_request) Has been skipped
Some checks failed
CI Workflow / Check GoReleaser config (pull_request) Successful in 4s
CI Workflow / Test readiness for publishing provider (pull_request) Failing after 3m57s
CI Workflow / CI run tests (pull_request) Failing after 5m5s
CI Workflow / CI run build and linting (pull_request) Failing after 4m50s
CI Workflow / Code coverage report (pull_request) Has been skipped
This commit is contained in:
parent
411e99739a
commit
d6d3a795bb
118 changed files with 3101 additions and 18065 deletions
38
docs/data-sources/postgresflexalpha_database.md
Normal file
38
docs/data-sources/postgresflexalpha_database.md
Normal file
|
|
@ -0,0 +1,38 @@
|
||||||
|
---
|
||||||
|
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||||
|
page_title: "stackitprivatepreview_postgresflexalpha_database Data Source - stackitprivatepreview"
|
||||||
|
subcategory: ""
|
||||||
|
description: |-
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# stackitprivatepreview_postgresflexalpha_database (Data Source)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Example Usage
|
||||||
|
|
||||||
|
```terraform
|
||||||
|
data "stackitprivatepreview_postgresflexalpha_database" "example" {
|
||||||
|
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
database_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
<!-- schema generated by tfplugindocs -->
|
||||||
|
## Schema
|
||||||
|
|
||||||
|
### Required
|
||||||
|
|
||||||
|
- `database_id` (Number) The ID of the database.
|
||||||
|
- `instance_id` (String) The ID of the instance.
|
||||||
|
- `project_id` (String) The STACKIT project ID.
|
||||||
|
- `region` (String) The region which should be addressed
|
||||||
|
|
||||||
|
### Read-Only
|
||||||
|
|
||||||
|
- `id` (String) Terraform's internal resource ID. It is structured as "`project_id`,`region`,`instance_id`,`database_id`".
|
||||||
|
- `name` (String) The name of the database.
|
||||||
|
- `owner` (String) The owner of the database.
|
||||||
|
- `tf_original_api_id` (Number) The id of the database.
|
||||||
54
docs/data-sources/postgresflexalpha_flavor.md
Normal file
54
docs/data-sources/postgresflexalpha_flavor.md
Normal file
|
|
@ -0,0 +1,54 @@
|
||||||
|
---
|
||||||
|
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||||
|
page_title: "stackitprivatepreview_postgresflexalpha_flavor Data Source - stackitprivatepreview"
|
||||||
|
subcategory: ""
|
||||||
|
description: |-
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# stackitprivatepreview_postgresflexalpha_flavor (Data Source)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Example Usage
|
||||||
|
|
||||||
|
```terraform
|
||||||
|
data "stackitprivatepreview_postgresflexalpha_flavor" "flavor" {
|
||||||
|
project_id = var.project_id
|
||||||
|
region = var.region
|
||||||
|
cpu = 4
|
||||||
|
ram = 16
|
||||||
|
node_type = "Single"
|
||||||
|
storage_class = "premium-perf2-stackit"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
<!-- schema generated by tfplugindocs -->
|
||||||
|
## Schema
|
||||||
|
|
||||||
|
### Required
|
||||||
|
|
||||||
|
- `cpu` (Number) The cpu count of the instance.
|
||||||
|
- `node_type` (String) defines the nodeType it can be either single or replica
|
||||||
|
- `project_id` (String) The STACKIT project ID.
|
||||||
|
- `ram` (Number) The memory of the instance in Gibibyte.
|
||||||
|
- `region` (String) The region which should be addressed
|
||||||
|
- `storage_class` (String) The storage class of the instance.
|
||||||
|
|
||||||
|
### Read-Only
|
||||||
|
|
||||||
|
- `description` (String) The flavor description.
|
||||||
|
- `flavor_id` (String) The flavor id of the instance flavor.
|
||||||
|
- `id` (String) The terraform id of the instance flavor.
|
||||||
|
- `max_gb` (Number) maximum storage which can be ordered for the flavor in Gigabyte.
|
||||||
|
- `min_gb` (Number) minimum storage which is required to order in Gigabyte.
|
||||||
|
- `storage_classes` (Attributes List) (see [below for nested schema](#nestedatt--storage_classes))
|
||||||
|
|
||||||
|
<a id="nestedatt--storage_classes"></a>
|
||||||
|
### Nested Schema for `storage_classes`
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `class` (String)
|
||||||
|
- `max_io_per_sec` (Number)
|
||||||
|
- `max_through_in_mb` (Number)
|
||||||
68
docs/data-sources/postgresflexalpha_flavors.md
Normal file
68
docs/data-sources/postgresflexalpha_flavors.md
Normal file
|
|
@ -0,0 +1,68 @@
|
||||||
|
---
|
||||||
|
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||||
|
page_title: "stackitprivatepreview_postgresflexalpha_flavors Data Source - stackitprivatepreview"
|
||||||
|
subcategory: ""
|
||||||
|
description: |-
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# stackitprivatepreview_postgresflexalpha_flavors (Data Source)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
<!-- schema generated by tfplugindocs -->
|
||||||
|
## Schema
|
||||||
|
|
||||||
|
### Required
|
||||||
|
|
||||||
|
- `project_id` (String) The STACKIT project ID.
|
||||||
|
- `region` (String) The region which should be addressed
|
||||||
|
|
||||||
|
### Optional
|
||||||
|
|
||||||
|
- `page` (Number) Number of the page of items list to be returned.
|
||||||
|
- `size` (Number) Number of items to be returned on each page.
|
||||||
|
- `sort` (String) Sorting of the flavors to be returned on each page.
|
||||||
|
|
||||||
|
### Read-Only
|
||||||
|
|
||||||
|
- `flavors` (Attributes List) List of flavors available for the project. (see [below for nested schema](#nestedatt--flavors))
|
||||||
|
- `pagination` (Attributes) (see [below for nested schema](#nestedatt--pagination))
|
||||||
|
|
||||||
|
<a id="nestedatt--flavors"></a>
|
||||||
|
### Nested Schema for `flavors`
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `cpu` (Number) The cpu count of the instance.
|
||||||
|
- `description` (String) The flavor description.
|
||||||
|
- `max_gb` (Number) maximum storage which can be ordered for the flavor in Gigabyte.
|
||||||
|
- `memory` (Number) The memory of the instance in Gibibyte.
|
||||||
|
- `min_gb` (Number) minimum storage which is required to order in Gigabyte.
|
||||||
|
- `node_type` (String) defines the nodeType it can be either single or replica
|
||||||
|
- `storage_classes` (Attributes List) The storage classes available for the flavor. (see [below for nested schema](#nestedatt--flavors--storage_classes))
|
||||||
|
- `tf_original_api_id` (String) The id of the instance flavor.
|
||||||
|
|
||||||
|
<a id="nestedatt--flavors--storage_classes"></a>
|
||||||
|
### Nested Schema for `flavors.storage_classes`
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `class` (String)
|
||||||
|
- `max_io_per_sec` (Number)
|
||||||
|
- `max_through_in_mb` (Number)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
<a id="nestedatt--pagination"></a>
|
||||||
|
### Nested Schema for `pagination`
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `page` (Number)
|
||||||
|
- `size` (Number)
|
||||||
|
- `sort` (String)
|
||||||
|
- `total_pages` (Number)
|
||||||
|
- `total_rows` (Number)
|
||||||
95
docs/data-sources/postgresflexalpha_instance.md
Normal file
95
docs/data-sources/postgresflexalpha_instance.md
Normal file
|
|
@ -0,0 +1,95 @@
|
||||||
|
---
|
||||||
|
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||||
|
page_title: "stackitprivatepreview_postgresflexalpha_instance Data Source - stackitprivatepreview"
|
||||||
|
subcategory: ""
|
||||||
|
description: |-
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# stackitprivatepreview_postgresflexalpha_instance (Data Source)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Example Usage
|
||||||
|
|
||||||
|
```terraform
|
||||||
|
data "stackitprivatepreview_postgresflexalpha_instance" "example" {
|
||||||
|
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
<!-- schema generated by tfplugindocs -->
|
||||||
|
## Schema
|
||||||
|
|
||||||
|
### Required
|
||||||
|
|
||||||
|
- `instance_id` (String) The ID of the instance.
|
||||||
|
- `project_id` (String) The STACKIT project ID.
|
||||||
|
- `region` (String) The region which should be addressed
|
||||||
|
|
||||||
|
### Read-Only
|
||||||
|
|
||||||
|
- `acl` (List of String) List of IPV4 cidr.
|
||||||
|
- `backup_schedule` (String) The schedule for when the database backup will be created. Currently, ONLY daily schedules are supported (every 24 hours). The schedule is written as a cron schedule.
|
||||||
|
- `connection_info` (Attributes) The connection information of the instance (see [below for nested schema](#nestedatt--connection_info))
|
||||||
|
- `encryption` (Attributes) The configuration for instance's volume and backup storage encryption.
|
||||||
|
|
||||||
|
⚠ **Note:** This feature is in private preview. Supplying this object is only permitted for enabled accounts. If your account does not have access, the request will be rejected. (see [below for nested schema](#nestedatt--encryption))
|
||||||
|
- `flavor_id` (String) The id of the instance flavor.
|
||||||
|
- `is_deletable` (Boolean) Whether the instance can be deleted or not.
|
||||||
|
- `name` (String) The name of the instance.
|
||||||
|
- `network` (Attributes) The access configuration of the instance (see [below for nested schema](#nestedatt--network))
|
||||||
|
- `replicas` (Number) How many replicas the instance should have.
|
||||||
|
- `retention_days` (Number) How long backups are retained. The value can only be between 32 and 365 days.
|
||||||
|
- `status` (String) The current status of the instance.
|
||||||
|
- `storage` (Attributes) The object containing information about the storage size and class. (see [below for nested schema](#nestedatt--storage))
|
||||||
|
- `tf_original_api_id` (String) The ID of the instance.
|
||||||
|
- `version` (String) The Postgres version used for the instance. See [Versions Endpoint](/documentation/postgres-flex-service/version/v3alpha1#tag/Version) for supported version parameters.
|
||||||
|
|
||||||
|
<a id="nestedatt--connection_info"></a>
|
||||||
|
### Nested Schema for `connection_info`
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `write` (Attributes) The DNS name and port in the instance overview (see [below for nested schema](#nestedatt--connection_info--write))
|
||||||
|
|
||||||
|
<a id="nestedatt--connection_info--write"></a>
|
||||||
|
### Nested Schema for `connection_info.write`
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `host` (String) The host of the instance.
|
||||||
|
- `port` (Number) The port of the instance.
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
<a id="nestedatt--encryption"></a>
|
||||||
|
### Nested Schema for `encryption`
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `kek_key_id` (String) The encryption-key key identifier
|
||||||
|
- `kek_key_ring_id` (String) The encryption-key keyring identifier
|
||||||
|
- `kek_key_version` (String) The encryption-key version
|
||||||
|
- `service_account` (String)
|
||||||
|
|
||||||
|
|
||||||
|
<a id="nestedatt--network"></a>
|
||||||
|
### Nested Schema for `network`
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `access_scope` (String) The access scope of the instance. It defines if the instance is public or airgapped.
|
||||||
|
- `acl` (List of String) List of IPV4 cidr.
|
||||||
|
- `instance_address` (String)
|
||||||
|
- `router_address` (String)
|
||||||
|
|
||||||
|
|
||||||
|
<a id="nestedatt--storage"></a>
|
||||||
|
### Nested Schema for `storage`
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `performance_class` (String) The storage class for the storage.
|
||||||
|
- `size` (Number) The storage size in Gigabytes.
|
||||||
42
docs/data-sources/postgresflexalpha_user.md
Normal file
42
docs/data-sources/postgresflexalpha_user.md
Normal file
|
|
@ -0,0 +1,42 @@
|
||||||
|
---
|
||||||
|
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||||
|
page_title: "stackitprivatepreview_postgresflexalpha_user Data Source - stackitprivatepreview"
|
||||||
|
subcategory: ""
|
||||||
|
description: |-
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# stackitprivatepreview_postgresflexalpha_user (Data Source)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Example Usage
|
||||||
|
|
||||||
|
```terraform
|
||||||
|
data "stackitprivatepreview_postgresflexalpha_user" "example" {
|
||||||
|
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
user_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
<!-- schema generated by tfplugindocs -->
|
||||||
|
## Schema
|
||||||
|
|
||||||
|
### Required
|
||||||
|
|
||||||
|
- `instance_id` (String) The ID of the instance.
|
||||||
|
- `project_id` (String) The STACKIT project ID.
|
||||||
|
- `region` (String) The region which should be addressed
|
||||||
|
- `user_id` (Number) The ID of the user.
|
||||||
|
|
||||||
|
### Optional
|
||||||
|
|
||||||
|
- `id` (String) Terraform's internal resource ID. It is structured as "`project_id`,`region`,`instance_id`,`user_id`".
|
||||||
|
|
||||||
|
### Read-Only
|
||||||
|
|
||||||
|
- `name` (String) The name of the user.
|
||||||
|
- `roles` (List of String) A list of user roles.
|
||||||
|
- `status` (String) The current status of the user.
|
||||||
|
- `tf_original_api_id` (Number) The ID of the user.
|
||||||
32
docs/data-sources/sqlserverflexalpha_database.md
Normal file
32
docs/data-sources/sqlserverflexalpha_database.md
Normal file
|
|
@ -0,0 +1,32 @@
|
||||||
|
---
|
||||||
|
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||||
|
page_title: "stackitprivatepreview_sqlserverflexalpha_database Data Source - stackitprivatepreview"
|
||||||
|
subcategory: ""
|
||||||
|
description: |-
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# stackitprivatepreview_sqlserverflexalpha_database (Data Source)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
<!-- schema generated by tfplugindocs -->
|
||||||
|
## Schema
|
||||||
|
|
||||||
|
### Required
|
||||||
|
|
||||||
|
- `database_name` (String) The name of the database.
|
||||||
|
- `instance_id` (String) The ID of the instance.
|
||||||
|
- `project_id` (String) The STACKIT project ID.
|
||||||
|
- `region` (String) The region which should be addressed
|
||||||
|
|
||||||
|
### Read-Only
|
||||||
|
|
||||||
|
- `collation_name` (String) The collation of the database. This database collation should match the *collation_name* of one of the collations given by the **Get database collation list** endpoint.
|
||||||
|
- `compatibility_level` (Number) CompatibilityLevel of the Database.
|
||||||
|
- `id` (String) The terraform internal identifier.
|
||||||
|
- `name` (String) The name of the database.
|
||||||
|
- `owner` (String) The owner of the database.
|
||||||
|
- `tf_original_api_id` (Number) The id of the database.
|
||||||
54
docs/data-sources/sqlserverflexalpha_flavor.md
Normal file
54
docs/data-sources/sqlserverflexalpha_flavor.md
Normal file
|
|
@ -0,0 +1,54 @@
|
||||||
|
---
|
||||||
|
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||||
|
page_title: "stackitprivatepreview_sqlserverflexalpha_flavor Data Source - stackitprivatepreview"
|
||||||
|
subcategory: ""
|
||||||
|
description: |-
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# stackitprivatepreview_sqlserverflexalpha_flavor (Data Source)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Example Usage
|
||||||
|
|
||||||
|
```terraform
|
||||||
|
data "stackitprivatepreview_sqlserverflexalpha_flavor" "flavor" {
|
||||||
|
project_id = var.project_id
|
||||||
|
region = var.region
|
||||||
|
cpu = 4
|
||||||
|
ram = 16
|
||||||
|
node_type = "Single"
|
||||||
|
storage_class = "premium-perf2-stackit"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
<!-- schema generated by tfplugindocs -->
|
||||||
|
## Schema
|
||||||
|
|
||||||
|
### Required
|
||||||
|
|
||||||
|
- `cpu` (Number) The cpu count of the instance.
|
||||||
|
- `node_type` (String) defines the nodeType it can be either single or HA
|
||||||
|
- `project_id` (String) The project ID of the flavor.
|
||||||
|
- `ram` (Number) The memory of the instance in Gibibyte.
|
||||||
|
- `region` (String) The region of the flavor.
|
||||||
|
- `storage_class` (String) The storage class of the instance.
|
||||||
|
|
||||||
|
### Read-Only
|
||||||
|
|
||||||
|
- `description` (String) The flavor description.
|
||||||
|
- `flavor_id` (String) The id of the instance flavor.
|
||||||
|
- `id` (String) The id of the instance flavor.
|
||||||
|
- `max_gb` (Number) maximum storage which can be ordered for the flavor in Gigabyte.
|
||||||
|
- `min_gb` (Number) minimum storage which is required to order in Gigabyte.
|
||||||
|
- `storage_classes` (Attributes List) The storage classes available for the flavor. (see [below for nested schema](#nestedatt--storage_classes))
|
||||||
|
|
||||||
|
<a id="nestedatt--storage_classes"></a>
|
||||||
|
### Nested Schema for `storage_classes`
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `class` (String)
|
||||||
|
- `max_io_per_sec` (Number)
|
||||||
|
- `max_through_in_mb` (Number)
|
||||||
77
docs/data-sources/sqlserverflexalpha_instance.md
Normal file
77
docs/data-sources/sqlserverflexalpha_instance.md
Normal file
|
|
@ -0,0 +1,77 @@
|
||||||
|
---
|
||||||
|
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||||
|
page_title: "stackitprivatepreview_sqlserverflexalpha_instance Data Source - stackitprivatepreview"
|
||||||
|
subcategory: ""
|
||||||
|
description: |-
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# stackitprivatepreview_sqlserverflexalpha_instance (Data Source)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Example Usage
|
||||||
|
|
||||||
|
```terraform
|
||||||
|
data "stackitprivatepreview_sqlserverflexalpha_instance" "example" {
|
||||||
|
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
<!-- schema generated by tfplugindocs -->
|
||||||
|
## Schema
|
||||||
|
|
||||||
|
### Required
|
||||||
|
|
||||||
|
- `instance_id` (String) The ID of the instance.
|
||||||
|
- `project_id` (String) The STACKIT project ID.
|
||||||
|
- `region` (String) The region which should be addressed
|
||||||
|
|
||||||
|
### Read-Only
|
||||||
|
|
||||||
|
- `backup_schedule` (String) The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule.
|
||||||
|
- `edition` (String) Edition of the MSSQL server instance
|
||||||
|
- `encryption` (Attributes) this defines which key to use for storage encryption (see [below for nested schema](#nestedatt--encryption))
|
||||||
|
- `flavor_id` (String) The id of the instance flavor.
|
||||||
|
- `is_deletable` (Boolean) Whether the instance can be deleted or not.
|
||||||
|
- `name` (String) The name of the instance.
|
||||||
|
- `network` (Attributes) The access configuration of the instance (see [below for nested schema](#nestedatt--network))
|
||||||
|
- `replicas` (Number) How many replicas the instance should have.
|
||||||
|
- `retention_days` (Number) The days for how long the backup files should be stored before cleaned up. 30 to 365
|
||||||
|
- `status` (String)
|
||||||
|
- `storage` (Attributes) The object containing information about the storage size and class. (see [below for nested schema](#nestedatt--storage))
|
||||||
|
- `tf_original_api_id` (String) The ID of the instance.
|
||||||
|
- `version` (String) The sqlserver version used for the instance.
|
||||||
|
|
||||||
|
<a id="nestedatt--encryption"></a>
|
||||||
|
### Nested Schema for `encryption`
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `kek_key_id` (String) The key identifier
|
||||||
|
- `kek_key_ring_id` (String) The keyring identifier
|
||||||
|
- `kek_key_version` (String) The key version
|
||||||
|
- `service_account` (String)
|
||||||
|
|
||||||
|
|
||||||
|
<a id="nestedatt--network"></a>
|
||||||
|
### Nested Schema for `network`
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `access_scope` (String) The network access scope of the instance
|
||||||
|
|
||||||
|
⚠️ **Note:** This feature is in private preview. Supplying this object is only permitted for enabled accounts. If your account does not have access, the request will be rejected.
|
||||||
|
- `acl` (List of String) List of IPV4 cidr.
|
||||||
|
- `instance_address` (String)
|
||||||
|
- `router_address` (String)
|
||||||
|
|
||||||
|
|
||||||
|
<a id="nestedatt--storage"></a>
|
||||||
|
### Nested Schema for `storage`
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `class` (String) The storage class for the storage.
|
||||||
|
- `size` (Number) The storage size in Gigabytes.
|
||||||
62
docs/data-sources/sqlserverflexalpha_user.md
Normal file
62
docs/data-sources/sqlserverflexalpha_user.md
Normal file
|
|
@ -0,0 +1,62 @@
|
||||||
|
---
|
||||||
|
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||||
|
page_title: "stackitprivatepreview_sqlserverflexalpha_user Data Source - stackitprivatepreview"
|
||||||
|
subcategory: ""
|
||||||
|
description: |-
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# stackitprivatepreview_sqlserverflexalpha_user (Data Source)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Example Usage
|
||||||
|
|
||||||
|
```terraform
|
||||||
|
data "stackitprivatepreview_sqlserverflexalpha_user" "example" {
|
||||||
|
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
user_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
<!-- schema generated by tfplugindocs -->
|
||||||
|
## Schema
|
||||||
|
|
||||||
|
### Required
|
||||||
|
|
||||||
|
- `instance_id` (String) The ID of the instance.
|
||||||
|
- `project_id` (String) The STACKIT project ID.
|
||||||
|
- `region` (String) The region which should be addressed
|
||||||
|
|
||||||
|
### Optional
|
||||||
|
|
||||||
|
- `page` (Number) Number of the page of items list to be returned.
|
||||||
|
- `size` (Number) Number of items to be returned on each page.
|
||||||
|
- `sort` (String) Sorting of the users to be returned on each page.
|
||||||
|
|
||||||
|
### Read-Only
|
||||||
|
|
||||||
|
- `pagination` (Attributes) (see [below for nested schema](#nestedatt--pagination))
|
||||||
|
- `users` (Attributes List) List of all users inside an instance (see [below for nested schema](#nestedatt--users))
|
||||||
|
|
||||||
|
<a id="nestedatt--pagination"></a>
|
||||||
|
### Nested Schema for `pagination`
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `page` (Number)
|
||||||
|
- `size` (Number)
|
||||||
|
- `sort` (String)
|
||||||
|
- `total_pages` (Number)
|
||||||
|
- `total_rows` (Number)
|
||||||
|
|
||||||
|
|
||||||
|
<a id="nestedatt--users"></a>
|
||||||
|
### Nested Schema for `users`
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `status` (String) The current status of the user.
|
||||||
|
- `tf_original_api_id` (Number) The ID of the user.
|
||||||
|
- `username` (String) The name of the user.
|
||||||
40
docs/data-sources/sqlserverflexbeta_database.md
Normal file
40
docs/data-sources/sqlserverflexbeta_database.md
Normal file
|
|
@ -0,0 +1,40 @@
|
||||||
|
---
|
||||||
|
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||||
|
page_title: "stackitprivatepreview_sqlserverflexbeta_database Data Source - stackitprivatepreview"
|
||||||
|
subcategory: ""
|
||||||
|
description: |-
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# stackitprivatepreview_sqlserverflexbeta_database (Data Source)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Example Usage
|
||||||
|
|
||||||
|
```terraform
|
||||||
|
data "stackitprivatepreview_sqlserverflexbeta_database" "example" {
|
||||||
|
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
database_name = "dbname"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
<!-- schema generated by tfplugindocs -->
|
||||||
|
## Schema
|
||||||
|
|
||||||
|
### Required
|
||||||
|
|
||||||
|
- `database_name` (String) The name of the database.
|
||||||
|
- `instance_id` (String) The ID of the instance.
|
||||||
|
- `project_id` (String) The STACKIT project ID.
|
||||||
|
- `region` (String) The region which should be addressed
|
||||||
|
|
||||||
|
### Read-Only
|
||||||
|
|
||||||
|
- `collation_name` (String) The collation of the database. This database collation should match the *collation_name* of one of the collations given by the **Get database collation list** endpoint.
|
||||||
|
- `compatibility_level` (Number) CompatibilityLevel of the Database.
|
||||||
|
- `id` (String) The terraform internal identifier.
|
||||||
|
- `name` (String) The name of the database.
|
||||||
|
- `owner` (String) The owner of the database.
|
||||||
|
- `tf_original_api_id` (Number) The id of the database.
|
||||||
54
docs/data-sources/sqlserverflexbeta_flavor.md
Normal file
54
docs/data-sources/sqlserverflexbeta_flavor.md
Normal file
|
|
@ -0,0 +1,54 @@
|
||||||
|
---
|
||||||
|
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||||
|
page_title: "stackitprivatepreview_sqlserverflexbeta_flavor Data Source - stackitprivatepreview"
|
||||||
|
subcategory: ""
|
||||||
|
description: |-
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# stackitprivatepreview_sqlserverflexbeta_flavor (Data Source)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Example Usage
|
||||||
|
|
||||||
|
```terraform
|
||||||
|
data "stackitprivatepreview_sqlserverflexbeta_flavor" "flavor" {
|
||||||
|
project_id = var.project_id
|
||||||
|
region = var.region
|
||||||
|
cpu = 4
|
||||||
|
ram = 16
|
||||||
|
node_type = "Single"
|
||||||
|
storage_class = "premium-perf2-stackit"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
<!-- schema generated by tfplugindocs -->
|
||||||
|
## Schema
|
||||||
|
|
||||||
|
### Required
|
||||||
|
|
||||||
|
- `cpu` (Number) The cpu count of the instance.
|
||||||
|
- `node_type` (String) defines the nodeType it can be either single or HA
|
||||||
|
- `project_id` (String) The project ID of the flavor.
|
||||||
|
- `ram` (Number) The memory of the instance in Gibibyte.
|
||||||
|
- `region` (String) The region of the flavor.
|
||||||
|
- `storage_class` (String) The storage class of the instance.
|
||||||
|
|
||||||
|
### Read-Only
|
||||||
|
|
||||||
|
- `description` (String) The flavor description.
|
||||||
|
- `flavor_id` (String) The id of the instance flavor.
|
||||||
|
- `id` (String) The id of the instance flavor.
|
||||||
|
- `max_gb` (Number) maximum storage which can be ordered for the flavor in Gigabyte.
|
||||||
|
- `min_gb` (Number) minimum storage which is required to order in Gigabyte.
|
||||||
|
- `storage_classes` (Attributes List) The storage classes available for the flavor. (see [below for nested schema](#nestedatt--storage_classes))
|
||||||
|
|
||||||
|
<a id="nestedatt--storage_classes"></a>
|
||||||
|
### Nested Schema for `storage_classes`
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `class` (String)
|
||||||
|
- `max_io_per_sec` (Number)
|
||||||
|
- `max_through_in_mb` (Number)
|
||||||
77
docs/data-sources/sqlserverflexbeta_instance.md
Normal file
77
docs/data-sources/sqlserverflexbeta_instance.md
Normal file
|
|
@ -0,0 +1,77 @@
|
||||||
|
---
|
||||||
|
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||||
|
page_title: "stackitprivatepreview_sqlserverflexbeta_instance Data Source - stackitprivatepreview"
|
||||||
|
subcategory: ""
|
||||||
|
description: |-
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# stackitprivatepreview_sqlserverflexbeta_instance (Data Source)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Example Usage
|
||||||
|
|
||||||
|
```terraform
|
||||||
|
data "stackitprivatepreview_sqlserverflexbeta_instance" "example" {
|
||||||
|
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
<!-- schema generated by tfplugindocs -->
|
||||||
|
## Schema
|
||||||
|
|
||||||
|
### Required
|
||||||
|
|
||||||
|
- `instance_id` (String) The ID of the instance.
|
||||||
|
- `project_id` (String) The STACKIT project ID.
|
||||||
|
- `region` (String) The region which should be addressed
|
||||||
|
|
||||||
|
### Read-Only
|
||||||
|
|
||||||
|
- `backup_schedule` (String) The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule.
|
||||||
|
- `edition` (String) Edition of the MSSQL server instance
|
||||||
|
- `encryption` (Attributes) this defines which key to use for storage encryption (see [below for nested schema](#nestedatt--encryption))
|
||||||
|
- `flavor_id` (String) The id of the instance flavor.
|
||||||
|
- `is_deletable` (Boolean) Whether the instance can be deleted or not.
|
||||||
|
- `name` (String) The name of the instance.
|
||||||
|
- `network` (Attributes) The access configuration of the instance (see [below for nested schema](#nestedatt--network))
|
||||||
|
- `replicas` (Number) How many replicas the instance should have.
|
||||||
|
- `retention_days` (Number) The days for how long the backup files should be stored before cleaned up. 30 to 365
|
||||||
|
- `status` (String)
|
||||||
|
- `storage` (Attributes) The object containing information about the storage size and class. (see [below for nested schema](#nestedatt--storage))
|
||||||
|
- `tf_original_api_id` (String) The ID of the instance.
|
||||||
|
- `version` (String) The sqlserver version used for the instance.
|
||||||
|
|
||||||
|
<a id="nestedatt--encryption"></a>
|
||||||
|
### Nested Schema for `encryption`
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `kek_key_id` (String) The key identifier
|
||||||
|
- `kek_key_ring_id` (String) The keyring identifier
|
||||||
|
- `kek_key_version` (String) The key version
|
||||||
|
- `service_account` (String)
|
||||||
|
|
||||||
|
|
||||||
|
<a id="nestedatt--network"></a>
|
||||||
|
### Nested Schema for `network`
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `access_scope` (String) The network access scope of the instance
|
||||||
|
|
||||||
|
⚠️ **Note:** This feature is in private preview. Supplying this object is only permitted for enabled accounts. If your account does not have access, the request will be rejected.
|
||||||
|
- `acl` (List of String) List of IPV4 cidr.
|
||||||
|
- `instance_address` (String)
|
||||||
|
- `router_address` (String)
|
||||||
|
|
||||||
|
|
||||||
|
<a id="nestedatt--storage"></a>
|
||||||
|
### Nested Schema for `storage`
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `class` (String) The storage class for the storage.
|
||||||
|
- `size` (Number) The storage size in Gigabytes.
|
||||||
54
docs/data-sources/sqlserverflexbeta_user.md
Normal file
54
docs/data-sources/sqlserverflexbeta_user.md
Normal file
|
|
@ -0,0 +1,54 @@
|
||||||
|
---
|
||||||
|
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||||
|
page_title: "stackitprivatepreview_sqlserverflexbeta_user Data Source - stackitprivatepreview"
|
||||||
|
subcategory: ""
|
||||||
|
description: |-
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# stackitprivatepreview_sqlserverflexbeta_user (Data Source)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
<!-- schema generated by tfplugindocs -->
|
||||||
|
## Schema
|
||||||
|
|
||||||
|
### Required
|
||||||
|
|
||||||
|
- `instance_id` (String) The ID of the instance.
|
||||||
|
- `project_id` (String) The STACKIT project ID.
|
||||||
|
- `region` (String) The region which should be addressed
|
||||||
|
|
||||||
|
### Optional
|
||||||
|
|
||||||
|
- `page` (Number) Number of the page of items list to be returned.
|
||||||
|
- `size` (Number) Number of items to be returned on each page.
|
||||||
|
- `sort` (String) Sorting of the users to be returned on each page.
|
||||||
|
|
||||||
|
### Read-Only
|
||||||
|
|
||||||
|
- `pagination` (Attributes) (see [below for nested schema](#nestedatt--pagination))
|
||||||
|
- `users` (Attributes List) List of all users inside an instance (see [below for nested schema](#nestedatt--users))
|
||||||
|
|
||||||
|
<a id="nestedatt--pagination"></a>
|
||||||
|
### Nested Schema for `pagination`
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `page` (Number)
|
||||||
|
- `size` (Number)
|
||||||
|
- `sort` (String)
|
||||||
|
- `total_pages` (Number)
|
||||||
|
- `total_rows` (Number)
|
||||||
|
|
||||||
|
|
||||||
|
<a id="nestedatt--users"></a>
|
||||||
|
### Nested Schema for `users`
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `status` (String) The current status of the user.
|
||||||
|
- `tf_original_api_id` (Number) The ID of the user.
|
||||||
|
- `username` (String) The name of the user.
|
||||||
83
docs/index.md
Normal file
83
docs/index.md
Normal file
|
|
@ -0,0 +1,83 @@
|
||||||
|
---
|
||||||
|
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||||
|
page_title: "stackitprivatepreview Provider"
|
||||||
|
description: |-
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# stackitprivatepreview Provider
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Example Usage
|
||||||
|
|
||||||
|
```terraform
|
||||||
|
provider "stackitprivatepreview" {
|
||||||
|
default_region = "eu01"
|
||||||
|
}
|
||||||
|
|
||||||
|
provider "stackitprivatepreview" {
|
||||||
|
default_region = "eu01"
|
||||||
|
service_account_key_path = "service_account.json"
|
||||||
|
}
|
||||||
|
|
||||||
|
# Authentication
|
||||||
|
|
||||||
|
# Key flow
|
||||||
|
provider "stackitprivatepreview" {
|
||||||
|
default_region = "eu01"
|
||||||
|
service_account_key = var.service_account_key
|
||||||
|
private_key = var.private_key
|
||||||
|
}
|
||||||
|
|
||||||
|
# Key flow (using path)
|
||||||
|
provider "stackitprivatepreview" {
|
||||||
|
default_region = "eu01"
|
||||||
|
service_account_key_path = var.service_account_key_path
|
||||||
|
private_key_path = var.private_key_path
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
<!-- schema generated by tfplugindocs -->
|
||||||
|
## Schema
|
||||||
|
|
||||||
|
### Optional
|
||||||
|
|
||||||
|
- `authorization_custom_endpoint` (String) Custom endpoint for the Membership service
|
||||||
|
- `cdn_custom_endpoint` (String) Custom endpoint for the CDN service
|
||||||
|
- `credentials_path` (String) Path of JSON from where the credentials are read. Takes precedence over the env var `STACKIT_CREDENTIALS_PATH`. Default value is `~/.stackit/credentials.json`.
|
||||||
|
- `default_region` (String) Region will be used as the default location for regional services. Not all services require a region, some are global
|
||||||
|
- `dns_custom_endpoint` (String) Custom endpoint for the DNS service
|
||||||
|
- `enable_beta_resources` (Boolean) Enable beta resources. Default is false.
|
||||||
|
- `experiments` (List of String) Enables experiments. These are unstable features without official support. More information can be found in the README. Available Experiments: iam, routing-tables, network
|
||||||
|
- `git_custom_endpoint` (String) Custom endpoint for the Git service
|
||||||
|
- `iaas_custom_endpoint` (String) Custom endpoint for the IaaS service
|
||||||
|
- `kms_custom_endpoint` (String) Custom endpoint for the KMS service
|
||||||
|
- `loadbalancer_custom_endpoint` (String) Custom endpoint for the Load Balancer service
|
||||||
|
- `logme_custom_endpoint` (String) Custom endpoint for the LogMe service
|
||||||
|
- `mariadb_custom_endpoint` (String) Custom endpoint for the MariaDB service
|
||||||
|
- `modelserving_custom_endpoint` (String) Custom endpoint for the AI Model Serving service
|
||||||
|
- `mongodbflex_custom_endpoint` (String) Custom endpoint for the MongoDB Flex service
|
||||||
|
- `objectstorage_custom_endpoint` (String) Custom endpoint for the Object Storage service
|
||||||
|
- `observability_custom_endpoint` (String) Custom endpoint for the Observability service
|
||||||
|
- `opensearch_custom_endpoint` (String) Custom endpoint for the OpenSearch service
|
||||||
|
- `postgresflex_custom_endpoint` (String) Custom endpoint for the PostgresFlex service
|
||||||
|
- `private_key` (String) Private RSA key used for authentication, relevant for the key flow. It takes precedence over the private key that is included in the service account key.
|
||||||
|
- `private_key_path` (String) Path for the private RSA key used for authentication, relevant for the key flow. It takes precedence over the private key that is included in the service account key.
|
||||||
|
- `rabbitmq_custom_endpoint` (String) Custom endpoint for the RabbitMQ service
|
||||||
|
- `redis_custom_endpoint` (String) Custom endpoint for the Redis service
|
||||||
|
- `region` (String, Deprecated) Region will be used as the default location for regional services. Not all services require a region, some are global
|
||||||
|
- `resourcemanager_custom_endpoint` (String) Custom endpoint for the Resource Manager service
|
||||||
|
- `scf_custom_endpoint` (String) Custom endpoint for the Cloud Foundry (SCF) service
|
||||||
|
- `secretsmanager_custom_endpoint` (String) Custom endpoint for the Secrets Manager service
|
||||||
|
- `server_backup_custom_endpoint` (String) Custom endpoint for the Server Backup service
|
||||||
|
- `server_update_custom_endpoint` (String) Custom endpoint for the Server Update service
|
||||||
|
- `service_account_custom_endpoint` (String) Custom endpoint for the Service Account service
|
||||||
|
- `service_account_email` (String, Deprecated) Service account email. It can also be set using the environment variable STACKIT_SERVICE_ACCOUNT_EMAIL. It is required if you want to use the resource manager project resource.
|
||||||
|
- `service_account_key` (String) Service account key used for authentication. If set, the key flow will be used to authenticate all operations.
|
||||||
|
- `service_account_key_path` (String) Path for the service account key used for authentication. If set, the key flow will be used to authenticate all operations.
|
||||||
|
- `service_account_token` (String, Deprecated) Token used for authentication. If set, the token flow will be used to authenticate all operations.
|
||||||
|
- `service_enablement_custom_endpoint` (String) Custom endpoint for the Service Enablement API
|
||||||
|
- `ske_custom_endpoint` (String) Custom endpoint for the Kubernetes Engine (SKE) service
|
||||||
|
- `sqlserverflex_custom_endpoint` (String) Custom endpoint for the SQL Server Flex service
|
||||||
|
- `token_custom_endpoint` (String) Custom endpoint for the token API, which is used to request access tokens when using the key flow
|
||||||
57
docs/resources/postgresflexalpha_database.md
Normal file
57
docs/resources/postgresflexalpha_database.md
Normal file
|
|
@ -0,0 +1,57 @@
|
||||||
|
---
|
||||||
|
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||||
|
page_title: "stackitprivatepreview_postgresflexalpha_database Resource - stackitprivatepreview"
|
||||||
|
subcategory: ""
|
||||||
|
description: |-
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# stackitprivatepreview_postgresflexalpha_database (Resource)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Example Usage
|
||||||
|
|
||||||
|
```terraform
|
||||||
|
resource "stackitprivatepreview_postgresflexalpha_database" "example" {
|
||||||
|
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
name = "mydb"
|
||||||
|
owner = "myusername"
|
||||||
|
}
|
||||||
|
|
||||||
|
# Only use the import statement, if you want to import an existing postgresflex database
|
||||||
|
import {
|
||||||
|
to = stackitprivatepreview_postgresflexalpha_database.import-example
|
||||||
|
id = "${var.project_id},${var.region},${var.postgres_instance_id},${var.postgres_database_id}"
|
||||||
|
}
|
||||||
|
|
||||||
|
import {
|
||||||
|
to = stackitprivatepreview_postgresflexalpha_database.import-example
|
||||||
|
identity = {
|
||||||
|
project_id = "project_id"
|
||||||
|
region = "region"
|
||||||
|
instance_id = "instance_id"
|
||||||
|
database_id = "database_id"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
<!-- schema generated by tfplugindocs -->
|
||||||
|
## Schema
|
||||||
|
|
||||||
|
### Required
|
||||||
|
|
||||||
|
- `name` (String) The name of the database.
|
||||||
|
|
||||||
|
### Optional
|
||||||
|
|
||||||
|
- `database_id` (Number) The ID of the database.
|
||||||
|
- `instance_id` (String) The ID of the instance.
|
||||||
|
- `owner` (String) The owner of the database.
|
||||||
|
- `project_id` (String) The STACKIT project ID.
|
||||||
|
- `region` (String) The region which should be addressed
|
||||||
|
|
||||||
|
### Read-Only
|
||||||
|
|
||||||
|
- `id` (Number) The id of the database.
|
||||||
138
docs/resources/postgresflexalpha_instance.md
Normal file
138
docs/resources/postgresflexalpha_instance.md
Normal file
|
|
@ -0,0 +1,138 @@
|
||||||
|
---
|
||||||
|
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||||
|
page_title: "stackitprivatepreview_postgresflexalpha_instance Resource - stackitprivatepreview"
|
||||||
|
subcategory: ""
|
||||||
|
description: |-
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# stackitprivatepreview_postgresflexalpha_instance (Resource)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Example Usage
|
||||||
|
|
||||||
|
```terraform
|
||||||
|
resource "stackitprivatepreview_postgresflexalpha_instance" "example-instance" {
|
||||||
|
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
name = "example-instance"
|
||||||
|
acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"]
|
||||||
|
backup_schedule = "0 0 * * *"
|
||||||
|
retention_days = 30
|
||||||
|
flavor_id = "flavor.id"
|
||||||
|
replicas = 1
|
||||||
|
storage = {
|
||||||
|
performance_class = "premium-perf2-stackit"
|
||||||
|
size = 10
|
||||||
|
}
|
||||||
|
encryption = {
|
||||||
|
kek_key_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
kek_key_ring_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
kek_key_version = 1
|
||||||
|
service_account = "service@account.email"
|
||||||
|
}
|
||||||
|
network = {
|
||||||
|
acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"]
|
||||||
|
access_scope = "PUBLIC"
|
||||||
|
}
|
||||||
|
version = 17
|
||||||
|
}
|
||||||
|
|
||||||
|
# Only use the import statement, if you want to import an existing postgresflex instance
|
||||||
|
import {
|
||||||
|
to = stackitprivatepreview_postgresflexalpha_instance.import-example
|
||||||
|
id = "${var.project_id},${var.region},${var.postgres_instance_id}"
|
||||||
|
}
|
||||||
|
|
||||||
|
import {
|
||||||
|
to = stackitprivatepreview_postgresflexalpha_instance.import-example
|
||||||
|
identity = {
|
||||||
|
project_id = var.project_id
|
||||||
|
region = var.region
|
||||||
|
instance_id = var.postgres_instance_id
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
<!-- schema generated by tfplugindocs -->
|
||||||
|
## Schema
|
||||||
|
|
||||||
|
### Required
|
||||||
|
|
||||||
|
- `backup_schedule` (String) The schedule for when the database backup will be created. Currently, ONLY daily schedules are supported (every 24 hours). The schedule is written as a cron schedule.
|
||||||
|
- `flavor_id` (String) The id of the instance flavor.
|
||||||
|
- `name` (String) The name of the instance.
|
||||||
|
- `network` (Attributes) The access configuration of the instance (see [below for nested schema](#nestedatt--network))
|
||||||
|
- `replicas` (Number) How many replicas the instance should have.
|
||||||
|
- `retention_days` (Number) How long backups are retained. The value can only be between 32 and 365 days.
|
||||||
|
- `storage` (Attributes) The object containing information about the storage size and class. (see [below for nested schema](#nestedatt--storage))
|
||||||
|
- `version` (String) The Postgres version used for the instance. See [Versions Endpoint](/documentation/postgres-flex-service/version/v3alpha1#tag/Version) for supported version parameters.
|
||||||
|
|
||||||
|
### Optional
|
||||||
|
|
||||||
|
- `encryption` (Attributes) The configuration for instance's volume and backup storage encryption.
|
||||||
|
|
||||||
|
⚠ **Note:** This feature is in private preview. Supplying this object is only permitted for enabled accounts. If your account does not have access, the request will be rejected. (see [below for nested schema](#nestedatt--encryption))
|
||||||
|
- `instance_id` (String) The ID of the instance.
|
||||||
|
- `project_id` (String) The STACKIT project ID.
|
||||||
|
- `region` (String) The region which should be addressed
|
||||||
|
|
||||||
|
### Read-Only
|
||||||
|
|
||||||
|
- `acl` (List of String) List of IPV4 cidr.
|
||||||
|
- `connection_info` (Attributes) The connection information of the instance (see [below for nested schema](#nestedatt--connection_info))
|
||||||
|
- `id` (String) The ID of the instance.
|
||||||
|
- `is_deletable` (Boolean) Whether the instance can be deleted or not.
|
||||||
|
- `status` (String) The current status of the instance.
|
||||||
|
|
||||||
|
<a id="nestedatt--network"></a>
|
||||||
|
### Nested Schema for `network`
|
||||||
|
|
||||||
|
Required:
|
||||||
|
|
||||||
|
- `acl` (List of String) List of IPV4 cidr.
|
||||||
|
|
||||||
|
Optional:
|
||||||
|
|
||||||
|
- `access_scope` (String) The access scope of the instance. It defines if the instance is public or airgapped.
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `instance_address` (String)
|
||||||
|
- `router_address` (String)
|
||||||
|
|
||||||
|
|
||||||
|
<a id="nestedatt--storage"></a>
|
||||||
|
### Nested Schema for `storage`
|
||||||
|
|
||||||
|
Required:
|
||||||
|
|
||||||
|
- `performance_class` (String) The storage class for the storage.
|
||||||
|
- `size` (Number) The storage size in Gigabytes.
|
||||||
|
|
||||||
|
|
||||||
|
<a id="nestedatt--encryption"></a>
|
||||||
|
### Nested Schema for `encryption`
|
||||||
|
|
||||||
|
Required:
|
||||||
|
|
||||||
|
- `kek_key_id` (String) The encryption-key key identifier
|
||||||
|
- `kek_key_ring_id` (String) The encryption-key keyring identifier
|
||||||
|
- `kek_key_version` (String) The encryption-key version
|
||||||
|
- `service_account` (String)
|
||||||
|
|
||||||
|
|
||||||
|
<a id="nestedatt--connection_info"></a>
|
||||||
|
### Nested Schema for `connection_info`
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `write` (Attributes) The DNS name and port in the instance overview (see [below for nested schema](#nestedatt--connection_info--write))
|
||||||
|
|
||||||
|
<a id="nestedatt--connection_info--write"></a>
|
||||||
|
### Nested Schema for `connection_info.write`
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `host` (String) The host of the instance.
|
||||||
|
- `port` (Number) The port of the instance.
|
||||||
59
docs/resources/postgresflexalpha_user.md
Normal file
59
docs/resources/postgresflexalpha_user.md
Normal file
|
|
@ -0,0 +1,59 @@
|
||||||
|
---
|
||||||
|
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||||
|
page_title: "stackitprivatepreview_postgresflexalpha_user Resource - stackitprivatepreview"
|
||||||
|
subcategory: ""
|
||||||
|
description: |-
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# stackitprivatepreview_postgresflexalpha_user (Resource)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Example Usage
|
||||||
|
|
||||||
|
```terraform
|
||||||
|
resource "stackitprivatepreview_postgresflexalpha_user" "example" {
|
||||||
|
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
name = "username"
|
||||||
|
roles = ["role"]
|
||||||
|
}
|
||||||
|
|
||||||
|
# Only use the import statement, if you want to import an existing postgresflex user
|
||||||
|
import {
|
||||||
|
to = stackitprivatepreview_postgresflexalpha_user.import-example
|
||||||
|
id = "${var.project_id},${var.region},${var.postgres_instance_id},${var.user_id}"
|
||||||
|
}
|
||||||
|
|
||||||
|
import {
|
||||||
|
to = stackitprivatepreview_postgresflexalpha_user.import-example
|
||||||
|
identity = {
|
||||||
|
project_id = "project.id"
|
||||||
|
region = "region"
|
||||||
|
instance_id = "instance.id"
|
||||||
|
user_id = "user.id"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
<!-- schema generated by tfplugindocs -->
|
||||||
|
## Schema
|
||||||
|
|
||||||
|
### Required
|
||||||
|
|
||||||
|
- `name` (String) The name of the user.
|
||||||
|
|
||||||
|
### Optional
|
||||||
|
|
||||||
|
- `instance_id` (String) The ID of the instance.
|
||||||
|
- `project_id` (String) The STACKIT project ID.
|
||||||
|
- `region` (String) The region which should be addressed
|
||||||
|
- `roles` (List of String) A list containing the user roles for the instance.
|
||||||
|
- `user_id` (Number) The ID of the user.
|
||||||
|
|
||||||
|
### Read-Only
|
||||||
|
|
||||||
|
- `id` (Number) The ID of the user.
|
||||||
|
- `password` (String) The password for the user.
|
||||||
|
- `status` (String) The current status of the user.
|
||||||
63
docs/resources/sqlserverflexalpha_database.md
Normal file
63
docs/resources/sqlserverflexalpha_database.md
Normal file
|
|
@ -0,0 +1,63 @@
|
||||||
|
---
|
||||||
|
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||||
|
page_title: "stackitprivatepreview_sqlserverflexalpha_database Resource - stackitprivatepreview"
|
||||||
|
subcategory: ""
|
||||||
|
description: |-
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# stackitprivatepreview_sqlserverflexalpha_database (Resource)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Example Usage
|
||||||
|
|
||||||
|
```terraform
|
||||||
|
resource "stackitprivatepreview_sqlserverflexalpha_database" "example" {
|
||||||
|
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
collation = ""
|
||||||
|
compatibility = "160"
|
||||||
|
name = ""
|
||||||
|
owner = ""
|
||||||
|
}
|
||||||
|
|
||||||
|
# Only use the import statement, if you want to import an existing sqlserverflex database
|
||||||
|
import {
|
||||||
|
to = stackitprivatepreview_sqlserverflexalpha_database.import-example
|
||||||
|
id = "${var.project_id},${var.region},${var.sql_instance_id},${var.sql_user_id}"
|
||||||
|
}
|
||||||
|
|
||||||
|
import {
|
||||||
|
to = stackitprivatepreview_sqlserverflexalpha_database.import-example
|
||||||
|
identity = {
|
||||||
|
project_id = "project.id"
|
||||||
|
region = "region"
|
||||||
|
instance_id = "instance.id"
|
||||||
|
database_id = "database.id"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
<!-- schema generated by tfplugindocs -->
|
||||||
|
## Schema
|
||||||
|
|
||||||
|
### Required
|
||||||
|
|
||||||
|
- `name` (String) The name of the database.
|
||||||
|
- `owner` (String) The owner of the database.
|
||||||
|
|
||||||
|
### Optional
|
||||||
|
|
||||||
|
- `collation` (String) The collation of the database. This database collation should match the *collation_name* of one of the collations given by the **Get database collation list** endpoint.
|
||||||
|
- `compatibility` (Number) CompatibilityLevel of the Database.
|
||||||
|
- `database_name` (String) The name of the database.
|
||||||
|
- `instance_id` (String) The ID of the instance.
|
||||||
|
- `project_id` (String) The STACKIT project ID.
|
||||||
|
- `region` (String) The region which should be addressed
|
||||||
|
|
||||||
|
### Read-Only
|
||||||
|
|
||||||
|
- `collation_name` (String) The collation of the database. This database collation should match the *collation_name* of one of the collations given by the **Get database collation list** endpoint.
|
||||||
|
- `compatibility_level` (Number) CompatibilityLevel of the Database.
|
||||||
|
- `id` (Number) The id of the database.
|
||||||
103
docs/resources/sqlserverflexalpha_instance.md
Normal file
103
docs/resources/sqlserverflexalpha_instance.md
Normal file
|
|
@ -0,0 +1,103 @@
|
||||||
|
---
|
||||||
|
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||||
|
page_title: "stackitprivatepreview_sqlserverflexalpha_instance Resource - stackitprivatepreview"
|
||||||
|
subcategory: ""
|
||||||
|
description: |-
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# stackitprivatepreview_sqlserverflexalpha_instance (Resource)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Example Usage
|
||||||
|
|
||||||
|
```terraform
|
||||||
|
resource "stackitprivatepreview_sqlserverflexalpha_instance" "example" {
|
||||||
|
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
name = "example-instance"
|
||||||
|
acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"]
|
||||||
|
backup_schedule = "00 00 * * *"
|
||||||
|
flavor = {
|
||||||
|
cpu = 4
|
||||||
|
ram = 16
|
||||||
|
}
|
||||||
|
storage = {
|
||||||
|
class = "class"
|
||||||
|
size = 5
|
||||||
|
}
|
||||||
|
version = 2022
|
||||||
|
}
|
||||||
|
|
||||||
|
# Only use the import statement, if you want to import an existing sqlserverflex instance
|
||||||
|
import {
|
||||||
|
to = stackitprivatepreview_sqlserverflexalpha_instance.import-example
|
||||||
|
id = "${var.project_id},${var.region},${var.sql_instance_id}"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
<!-- schema generated by tfplugindocs -->
|
||||||
|
## Schema
|
||||||
|
|
||||||
|
### Required
|
||||||
|
|
||||||
|
- `backup_schedule` (String) The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule.
|
||||||
|
- `flavor_id` (String) The id of the instance flavor.
|
||||||
|
- `name` (String) The name of the instance.
|
||||||
|
- `network` (Attributes) the network configuration of the instance. (see [below for nested schema](#nestedatt--network))
|
||||||
|
- `retention_days` (Number) The number of days the backup files should be stored before being cleaned up. Valid range: 30 to 365.
|
||||||
|
- `storage` (Attributes) The object containing information about the storage size and class. (see [below for nested schema](#nestedatt--storage))
|
||||||
|
- `version` (String) The sqlserver version used for the instance.
|
||||||
|
|
||||||
|
### Optional
|
||||||
|
|
||||||
|
- `encryption` (Attributes) this defines which key to use for storage encryption (see [below for nested schema](#nestedatt--encryption))
|
||||||
|
- `instance_id` (String) The ID of the instance.
|
||||||
|
- `project_id` (String) The STACKIT project ID.
|
||||||
|
- `region` (String) The region which should be addressed
|
||||||
|
|
||||||
|
### Read-Only
|
||||||
|
|
||||||
|
- `edition` (String) Edition of the MSSQL server instance
|
||||||
|
- `id` (String) The ID of the instance.
|
||||||
|
- `is_deletable` (Boolean) Whether the instance can be deleted or not.
|
||||||
|
- `replicas` (Number) How many replicas the instance should have.
|
||||||
|
- `status` (String)
|
||||||
|
|
||||||
|
<a id="nestedatt--network"></a>
|
||||||
|
### Nested Schema for `network`
|
||||||
|
|
||||||
|
Required:
|
||||||
|
|
||||||
|
- `acl` (List of String) List of IPV4 cidr.
|
||||||
|
|
||||||
|
Optional:
|
||||||
|
|
||||||
|
- `access_scope` (String) The network access scope of the instance
|
||||||
|
|
||||||
|
⚠️ **Note:** This feature is in private preview. Supplying this object is only permitted for enabled accounts. If your account does not have access, the request will be rejected.
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `instance_address` (String)
|
||||||
|
- `router_address` (String)
|
||||||
|
|
||||||
|
|
||||||
|
<a id="nestedatt--storage"></a>
|
||||||
|
### Nested Schema for `storage`
|
||||||
|
|
||||||
|
Required:
|
||||||
|
|
||||||
|
- `class` (String) The storage class for the storage.
|
||||||
|
- `size` (Number) The storage size in Gigabytes.
|
||||||
|
|
||||||
|
|
||||||
|
<a id="nestedatt--encryption"></a>
|
||||||
|
### Nested Schema for `encryption`
|
||||||
|
|
||||||
|
Required:
|
||||||
|
|
||||||
|
- `kek_key_id` (String) The key identifier
|
||||||
|
- `kek_key_ring_id` (String) The keyring identifier
|
||||||
|
- `kek_key_version` (String) The key version
|
||||||
|
- `service_account` (String)
|
||||||
53
docs/resources/sqlserverflexalpha_user.md
Normal file
53
docs/resources/sqlserverflexalpha_user.md
Normal file
|
|
@ -0,0 +1,53 @@
|
||||||
|
---
|
||||||
|
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||||
|
page_title: "stackitprivatepreview_sqlserverflexalpha_user Resource - stackitprivatepreview"
|
||||||
|
subcategory: ""
|
||||||
|
description: |-
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# stackitprivatepreview_sqlserverflexalpha_user (Resource)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Example Usage
|
||||||
|
|
||||||
|
```terraform
|
||||||
|
resource "stackitprivatepreview_sqlserverflexalpha_user" "example" {
|
||||||
|
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
username = "username"
|
||||||
|
roles = ["role"]
|
||||||
|
}
|
||||||
|
|
||||||
|
# Only use the import statement, if you want to import an existing sqlserverflex user
|
||||||
|
import {
|
||||||
|
to = stackitprivatepreview_sqlserverflexalpha_user.import-example
|
||||||
|
id = "${var.project_id},${var.region},${var.sql_instance_id},${var.sql_user_id}"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
<!-- schema generated by tfplugindocs -->
|
||||||
|
## Schema
|
||||||
|
|
||||||
|
### Required
|
||||||
|
|
||||||
|
- `roles` (List of String) A list containing the user roles for the instance. A list with the valid user roles can be retrieved using the List Roles endpoint.
|
||||||
|
- `username` (String) The name of the user.
|
||||||
|
|
||||||
|
### Optional
|
||||||
|
|
||||||
|
- `default_database` (String) The default database for a user of the instance.
|
||||||
|
- `instance_id` (String) The ID of the instance.
|
||||||
|
- `project_id` (String) The STACKIT project ID.
|
||||||
|
- `region` (String) The region which should be addressed
|
||||||
|
- `user_id` (Number) The ID of the user.
|
||||||
|
|
||||||
|
### Read-Only
|
||||||
|
|
||||||
|
- `host` (String) The host of the instance in which the user belongs to.
|
||||||
|
- `id` (Number) The ID of the user.
|
||||||
|
- `password` (String) The password for the user.
|
||||||
|
- `port` (Number) The port of the instance in which the user belongs to.
|
||||||
|
- `status` (String) The current status of the user.
|
||||||
|
- `uri` (String) The connection string for the user to the instance.
|
||||||
51
docs/resources/sqlserverflexbeta_database.md
Normal file
51
docs/resources/sqlserverflexbeta_database.md
Normal file
|
|
@ -0,0 +1,51 @@
|
||||||
|
---
|
||||||
|
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||||
|
page_title: "stackitprivatepreview_sqlserverflexbeta_database Resource - stackitprivatepreview"
|
||||||
|
subcategory: ""
|
||||||
|
description: |-
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# stackitprivatepreview_sqlserverflexbeta_database (Resource)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Example Usage
|
||||||
|
|
||||||
|
```terraform
|
||||||
|
resource "stackitprivatepreview_sqlserverflexbeta_database" "example" {
  project_id  = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
  instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
  name        = "mydb"
  owner       = "myusername"
}
|
||||||
|
|
||||||
|
# Only use the import statement, if you want to import an existing sqlserverflex database
import {
  to = stackitprivatepreview_sqlserverflexbeta_database.import-example
  id = "${var.project_id},${var.region},${var.sql_instance_id},${var.sql_database_id}"
}
|
||||||
|
```
|
||||||
|
|
||||||
|
<!-- schema generated by tfplugindocs -->
|
||||||
|
## Schema
|
||||||
|
|
||||||
|
### Required
|
||||||
|
|
||||||
|
- `name` (String) The name of the database.
|
||||||
|
- `owner` (String) The owner of the database.
|
||||||
|
|
||||||
|
### Optional
|
||||||
|
|
||||||
|
- `collation` (String) The collation of the database. This database collation should match the *collation_name* of one of the collations given by the **Get database collation list** endpoint.
|
||||||
|
- `compatibility` (Number) CompatibilityLevel of the Database.
|
||||||
|
- `database_name` (String) The name of the database.
|
||||||
|
- `instance_id` (String) The ID of the instance.
|
||||||
|
- `project_id` (String) The STACKIT project ID.
|
||||||
|
- `region` (String) The region which should be addressed
|
||||||
|
|
||||||
|
### Read-Only
|
||||||
|
|
||||||
|
- `collation_name` (String) The collation of the database. This database collation should match the *collation_name* of one of the collations given by the **Get database collation list** endpoint.
|
||||||
|
- `compatibility_level` (Number) CompatibilityLevel of the Database.
|
||||||
|
- `id` (Number) The id of the database.
|
||||||
158
docs/resources/sqlserverflexbeta_instance.md
Normal file
158
docs/resources/sqlserverflexbeta_instance.md
Normal file
|
|
@ -0,0 +1,158 @@
|
||||||
|
---
|
||||||
|
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||||
|
page_title: "stackitprivatepreview_sqlserverflexbeta_instance Resource - stackitprivatepreview"
|
||||||
|
subcategory: ""
|
||||||
|
description: |-
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# stackitprivatepreview_sqlserverflexbeta_instance (Resource)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Example Usage
|
||||||
|
|
||||||
|
```terraform
|
||||||
|
# without encryption and SNA
|
||||||
|
resource "stackitprivatepreview_sqlserverflexbeta_instance" "instance" {
|
||||||
|
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
name = "example-instance"
|
||||||
|
backup_schedule = "0 3 * * *"
|
||||||
|
retention_days = 31
|
||||||
|
flavor_id = "flavor_id"
|
||||||
|
storage = {
|
||||||
|
class = "premium-perf2-stackit"
|
||||||
|
size = 50
|
||||||
|
}
|
||||||
|
version = 2022
|
||||||
|
network = {
|
||||||
|
acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"]
|
||||||
|
access_scope = "SNA"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
# without encryption and PUBLIC
|
||||||
|
resource "stackitprivatepreview_sqlserverflexbeta_instance" "instance" {
|
||||||
|
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
name = "example-instance"
|
||||||
|
backup_schedule = "0 3 * * *"
|
||||||
|
retention_days = 31
|
||||||
|
flavor_id = "flavor_id"
|
||||||
|
storage = {
|
||||||
|
class = "premium-perf2-stackit"
|
||||||
|
size = 50
|
||||||
|
}
|
||||||
|
version = 2022
|
||||||
|
network = {
|
||||||
|
acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"]
|
||||||
|
access_scope = "PUBLIC"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
# with encryption and SNA
|
||||||
|
resource "stackitprivatepreview_sqlserverflexbeta_instance" "instance" {
|
||||||
|
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
name = "example-instance"
|
||||||
|
backup_schedule = "0 3 * * *"
|
||||||
|
retention_days = 31
|
||||||
|
flavor_id = "flavor_id"
|
||||||
|
storage = {
|
||||||
|
class = "premium-perf2-stackit"
|
||||||
|
size = 50
|
||||||
|
}
|
||||||
|
version = 2022
|
||||||
|
encryption = {
|
||||||
|
kek_key_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
kek_key_ring_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
kek_key_version = 1
|
||||||
|
service_account = "service_account@email"
|
||||||
|
}
|
||||||
|
network = {
|
||||||
|
acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"]
|
||||||
|
access_scope = "SNA"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
# Only use the import statement, if you want to import an existing sqlserverflex instance
|
||||||
|
import {
|
||||||
|
to = stackitprivatepreview_sqlserverflexbeta_instance.import-example
|
||||||
|
id = "${var.project_id},${var.region},${var.sql_instance_id}"
|
||||||
|
}
|
||||||
|
|
||||||
|
# import with identity
|
||||||
|
import {
|
||||||
|
to = stackitprivatepreview_sqlserverflexbeta_instance.import-example
|
||||||
|
identity = {
|
||||||
|
project_id = var.project_id
|
||||||
|
region = var.region
|
||||||
|
instance_id = var.sql_instance_id
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
<!-- schema generated by tfplugindocs -->
|
||||||
|
## Schema
|
||||||
|
|
||||||
|
### Required
|
||||||
|
|
||||||
|
- `backup_schedule` (String) The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule.
|
||||||
|
- `flavor_id` (String) The id of the instance flavor.
|
||||||
|
- `name` (String) The name of the instance.
|
||||||
|
- `network` (Attributes) the network configuration of the instance. (see [below for nested schema](#nestedatt--network))
|
||||||
|
- `retention_days` (Number) The days for how long the backup files should be stored before cleaned up. 30 to 365
|
||||||
|
- `storage` (Attributes) The object containing information about the storage size and class. (see [below for nested schema](#nestedatt--storage))
|
||||||
|
- `version` (String) The sqlserver version used for the instance.
|
||||||
|
|
||||||
|
### Optional
|
||||||
|
|
||||||
|
- `encryption` (Attributes) this defines which key to use for storage encryption (see [below for nested schema](#nestedatt--encryption))
|
||||||
|
- `instance_id` (String) The ID of the instance.
|
||||||
|
- `project_id` (String) The STACKIT project ID.
|
||||||
|
- `region` (String) The region which should be addressed
|
||||||
|
|
||||||
|
### Read-Only
|
||||||
|
|
||||||
|
- `edition` (String) Edition of the MSSQL server instance
|
||||||
|
- `id` (String) The ID of the instance.
|
||||||
|
- `is_deletable` (Boolean) Whether the instance can be deleted or not.
|
||||||
|
- `replicas` (Number) How many replicas the instance should have.
|
||||||
|
- `status` (String)
|
||||||
|
|
||||||
|
<a id="nestedatt--network"></a>
|
||||||
|
### Nested Schema for `network`
|
||||||
|
|
||||||
|
Required:
|
||||||
|
|
||||||
|
- `acl` (List of String) List of IPV4 cidr.
|
||||||
|
|
||||||
|
Optional:
|
||||||
|
|
||||||
|
- `access_scope` (String) The network access scope of the instance
|
||||||
|
|
||||||
|
⚠️ **Note:** This feature is in private preview. Supplying this object is only permitted for enabled accounts. If your account does not have access, the request will be rejected.
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `instance_address` (String)
|
||||||
|
- `router_address` (String)
|
||||||
|
|
||||||
|
|
||||||
|
<a id="nestedatt--storage"></a>
|
||||||
|
### Nested Schema for `storage`
|
||||||
|
|
||||||
|
Required:
|
||||||
|
|
||||||
|
- `class` (String) The storage class for the storage.
|
||||||
|
- `size` (Number) The storage size in Gigabytes.
|
||||||
|
|
||||||
|
|
||||||
|
<a id="nestedatt--encryption"></a>
|
||||||
|
### Nested Schema for `encryption`
|
||||||
|
|
||||||
|
Required:
|
||||||
|
|
||||||
|
- `kek_key_id` (String) The key identifier
|
||||||
|
- `kek_key_ring_id` (String) The keyring identifier
|
||||||
|
- `kek_key_version` (String) The key version
|
||||||
|
- `service_account` (String)
|
||||||
53
docs/resources/sqlserverflexbeta_user.md
Normal file
53
docs/resources/sqlserverflexbeta_user.md
Normal file
|
|
@ -0,0 +1,53 @@
|
||||||
|
---
|
||||||
|
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||||
|
page_title: "stackitprivatepreview_sqlserverflexbeta_user Resource - stackitprivatepreview"
|
||||||
|
subcategory: ""
|
||||||
|
description: |-
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# stackitprivatepreview_sqlserverflexbeta_user (Resource)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Example Usage
|
||||||
|
|
||||||
|
```terraform
|
||||||
|
resource "stackitprivatepreview_sqlserverflexbeta_user" "example" {
|
||||||
|
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
username = "username"
|
||||||
|
roles = ["role"]
|
||||||
|
}
|
||||||
|
|
||||||
|
# Only use the import statement, if you want to import an existing sqlserverflex user
|
||||||
|
import {
|
||||||
|
to = stackitprivatepreview_sqlserverflexbeta_user.import-example
|
||||||
|
id = "${var.project_id},${var.region},${var.sql_instance_id},${var.sql_user_id}"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
<!-- schema generated by tfplugindocs -->
|
||||||
|
## Schema
|
||||||
|
|
||||||
|
### Required
|
||||||
|
|
||||||
|
- `roles` (List of String) A list containing the user roles for the instance. A list with the valid user roles can be retrieved using the List Roles endpoint.
|
||||||
|
- `username` (String) The name of the user.
|
||||||
|
|
||||||
|
### Optional
|
||||||
|
|
||||||
|
- `default_database` (String) The default database for a user of the instance.
|
||||||
|
- `instance_id` (String) The ID of the instance.
|
||||||
|
- `project_id` (String) The STACKIT project ID.
|
||||||
|
- `region` (String) The region which should be addressed
|
||||||
|
- `user_id` (Number) The ID of the user.
|
||||||
|
|
||||||
|
### Read-Only
|
||||||
|
|
||||||
|
- `host` (String) The host of the instance in which the user belongs to.
|
||||||
|
- `id` (Number) The ID of the user.
|
||||||
|
- `password` (String) The password for the user.
|
||||||
|
- `port` (Number) The port of the instance in which the user belongs to.
|
||||||
|
- `status` (String) The current status of the user.
|
||||||
|
- `uri` (String) The connection string for the user to the instance.
|
||||||
File diff suppressed because it is too large
Load diff
119
generator/cmd/build/functions.go
Normal file
119
generator/cmd/build/functions.go
Normal file
|
|
@ -0,0 +1,119 @@
|
||||||
|
package build
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"log/slog"
|
||||||
|
"os"
|
||||||
|
"os/exec"
|
||||||
|
"strings"
|
||||||
|
"text/template"
|
||||||
|
)
|
||||||
|
|
||||||
|
func FileExists(pathValue string) bool {
|
||||||
|
_, err := os.Stat(pathValue)
|
||||||
|
if os.IsNotExist(err) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
// ucfirst returns s with its first rune upper-cased; it is exposed to
// templates via writeTemplateToFile's FuncMap.
//
// Fix: the previous implementation upper-cased s[:1], i.e. the first
// *byte*, which corrupts a multi-byte UTF-8 first rune. Decoding the
// first rune via range preserves ASCII behavior and handles Unicode.
func ucfirst(s string) string {
	if s == "" {
		return ""
	}
	for _, r := range s {
		head := strings.ToUpper(string(r))
		return head + s[len(string(r)):]
	}
	return s // unreachable: s is non-empty
}
|
||||||
|
|
||||||
|
func writeTemplateToFile(tplName, tplFile, outFile string, data *templateData) error {
|
||||||
|
fn := template.FuncMap{
|
||||||
|
"ucfirst": ucfirst,
|
||||||
|
}
|
||||||
|
|
||||||
|
tmpl, err := template.New(tplName).Funcs(fn).ParseFiles(tplFile)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
var f *os.File
|
||||||
|
f, err = os.Create(outFile)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
err = tmpl.Execute(f, *data)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
err = f.Close()
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func deleteFiles(fNames ...string) error {
|
||||||
|
for _, fName := range fNames {
|
||||||
|
if _, err := os.Stat(fName); !os.IsNotExist(err) {
|
||||||
|
err = os.Remove(fName)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func copyFile(src, dst string) (int64, error) {
|
||||||
|
sourceFileStat, err := os.Stat(src)
|
||||||
|
if err != nil {
|
||||||
|
return 0, err
|
||||||
|
}
|
||||||
|
|
||||||
|
if !sourceFileStat.Mode().IsRegular() {
|
||||||
|
return 0, fmt.Errorf("%s is not a regular file", src)
|
||||||
|
}
|
||||||
|
|
||||||
|
source, err := os.Open(src)
|
||||||
|
if err != nil {
|
||||||
|
return 0, err
|
||||||
|
}
|
||||||
|
defer func(source *os.File) {
|
||||||
|
err := source.Close()
|
||||||
|
if err != nil {
|
||||||
|
slog.Error("copyFile", "err", err)
|
||||||
|
}
|
||||||
|
}(source)
|
||||||
|
|
||||||
|
destination, err := os.Create(dst)
|
||||||
|
if err != nil {
|
||||||
|
return 0, err
|
||||||
|
}
|
||||||
|
defer func(destination *os.File) {
|
||||||
|
err := destination.Close()
|
||||||
|
if err != nil {
|
||||||
|
slog.Error("copyFile", "err", err)
|
||||||
|
}
|
||||||
|
}(destination)
|
||||||
|
nBytes, err := io.Copy(destination, source)
|
||||||
|
return nBytes, err
|
||||||
|
}
|
||||||
|
|
||||||
|
func checkCommands(commands []string) error {
|
||||||
|
for _, commandName := range commands {
|
||||||
|
if !commandExists(commandName) {
|
||||||
|
return fmt.Errorf("missing command %s", commandName)
|
||||||
|
}
|
||||||
|
slog.Info(" found", "command", commandName)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// commandExists reports whether cmd resolves to an executable on PATH.
func commandExists(cmd string) bool {
	if _, err := exec.LookPath(cmd); err != nil {
		return false
	}
	return true
}
|
||||||
446
generator/cmd/build/oas-handler.go
Normal file
446
generator/cmd/build/oas-handler.go
Normal file
|
|
@ -0,0 +1,446 @@
|
||||||
|
package build
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bufio"
|
||||||
|
"bytes"
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
"log"
|
||||||
|
"log/slog"
|
||||||
|
"os"
|
||||||
|
"os/exec"
|
||||||
|
"path"
|
||||||
|
"regexp"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"gopkg.in/yaml.v3"
|
||||||
|
|
||||||
|
"github.com/ldez/go-git-cmd-wrapper/v2/clone"
|
||||||
|
"github.com/ldez/go-git-cmd-wrapper/v2/git"
|
||||||
|
)
|
||||||
|
|
||||||
|
const (
	// OasRepoName is the directory name the spec repository is cloned into.
	OasRepoName = "stackit-api-specifications"
	// OasRepo is the upstream git URL of the STACKIT API specifications.
	OasRepo = "https://github.com/stackitcloud/stackit-api-specifications.git"

	// ResTypeResource and ResTypeDataSource select which kind of
	// Terraform plugin code runTerraformPluginGenFramework generates.
	ResTypeResource   = "resources"
	ResTypeDataSource = "datasources"
)

// Data describes one service and the API versions to generate for it.
// Versions come from the service's generator_settings.yml; ServiceName
// is filled in by the caller from the directory name (see handleServices).
type Data struct {
	ServiceName string    `yaml:",omitempty" json:",omitempty"`
	Versions    []Version `yaml:"versions" json:"versions"`
}

// Version is one API version entry of a service: its short name and
// the repo-relative path to that version's spec directory.
type Version struct {
	Name string `yaml:"name" json:"name"`
	Path string `yaml:"path" json:"path"`
}

// oasTempDir holds the path of the cloned spec repository for the
// duration of a build run (set in createRepoDir, removed again in
// oasHandler unless cleanup is skipped).
var oasTempDir string
|
||||||
|
|
||||||
|
// oasHandler drives a full spec-to-code generation run: it validates
// the service-spec directory, clones the OAS repository (via
// createRepoDir, which sets the package-level oasTempDir), generates
// Terraform plugin code for every service, and — unless SkipCleanup is
// set — removes the temporary "generated" directory and the clone.
func (b *Builder) oasHandler(specDir string) error {
	if b.Verbose {
		slog.Info("creating schema files", "dir", specDir)
	}
	if _, err := os.Stat(specDir); os.IsNotExist(err) {
		return fmt.Errorf("spec files directory does not exist")
	}

	err := b.createRepoDir(b.SkipClone)
	if err != nil {
		// NOTE(review): re-wrapping as a plain string drops the error
		// chain (errors.Is/As); consider returning err directly.
		return fmt.Errorf("%s", err.Error())
	}

	err2 := b.handleServices(specDir)
	if err2 != nil {
		return err2
	}

	if !b.SkipCleanup {
		if b.Verbose {
			slog.Info("Finally removing temporary files and directories")
		}
		// Remove the intermediate spec JSON files produced under
		// <rootDir>/generated.
		err := os.RemoveAll(path.Join(b.rootDir, "generated"))
		if err != nil {
			slog.Error("RemoveAll", "dir", path.Join(b.rootDir, "generated"), "err", err)
			return err
		}

		// Remove the cloned spec repository.
		err = os.RemoveAll(oasTempDir)
		if err != nil {
			slog.Error("RemoveAll", "dir", oasTempDir, "err", err)
			return err
		}
	}

	return nil
}
|
||||||
|
|
||||||
|
// handleServices iterates over the service directories below specDir.
// For each service it loads the version list from the service's
// generator_settings.yml and then generates the plugin code for every
// listed version. The first error aborts the whole run.
func (b *Builder) handleServices(specDir string) error {
	services, err := os.ReadDir(specDir)
	if err != nil {
		return err
	}

	for _, svc := range services {
		// Only directories are services; stray files are ignored.
		if !svc.IsDir() {
			continue
		}

		if b.Verbose {
			slog.Info(" ... found", "service", svc.Name())
		}
		var svcVersions Data
		// The service name is taken from the directory name, not the YAML.
		svcVersions.ServiceName = svc.Name()

		versionsErr := b.getServiceVersions(path.Join(specDir, svc.Name(), "generator_settings.yml"), &svcVersions)
		if versionsErr != nil {
			return versionsErr
		}

		oasSpecErr := b.generateServiceFiles(&svcVersions)
		if oasSpecErr != nil {
			return oasSpecErr
		}
	}
	return nil
}
|
||||||
|
|
||||||
|
func (b *Builder) getServiceVersions(confFile string, data *Data) error {
|
||||||
|
if _, cfgFileErr := os.Stat(confFile); os.IsNotExist(cfgFileErr) {
|
||||||
|
return fmt.Errorf("config file does not exist")
|
||||||
|
}
|
||||||
|
|
||||||
|
fileContent, fileErr := os.ReadFile(confFile)
|
||||||
|
if fileErr != nil {
|
||||||
|
return fileErr
|
||||||
|
}
|
||||||
|
convErr := yaml.Unmarshal(fileContent, &data)
|
||||||
|
if convErr != nil {
|
||||||
|
return convErr
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// createRepoDir creates a fresh temporary directory and clones the
// STACKIT API specifications repository into it (unless skipClone is
// set). The clone destination is published via the package-level
// oasTempDir variable for the rest of the run.
func (b *Builder) createRepoDir(skipClone bool) error {
	tmpDirName, err := os.MkdirTemp("", "oasbuild")
	if err != nil {
		return err
	}
	oasTempDir = path.Join(tmpDirName, OasRepoName)
	slog.Info("Creating oas repo dir", "dir", oasTempDir)
	if !skipClone {
		// NOTE(review): oasTempDir lives inside a directory MkdirTemp
		// just created, so this existence check can never fire —
		// looks like dead code; confirm before removing.
		if FileExists(oasTempDir) {
			slog.Warn("target dir exists - skipping", "targetDir", oasTempDir)
			return nil
		}
		out, cloneErr := git.Clone(
			clone.Repository(OasRepo),
			clone.Directory(oasTempDir),
		)
		if cloneErr != nil {
			slog.Error("git clone error", "output", out)
			return cloneErr
		}
		if b.Verbose {
			slog.Info("git clone result", "output", out)
		}
	}
	return nil
}
|
||||||
|
|
||||||
|
func (b *Builder) generateServiceFiles(data *Data) error {
|
||||||
|
err := os.MkdirAll(path.Join(b.rootDir, "generated", "specs"), 0o750)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, v := range data.Versions {
|
||||||
|
specFiles, specsErr := os.ReadDir(path.Join(b.rootDir, "service_specs", data.ServiceName, v.Name))
|
||||||
|
if specsErr != nil {
|
||||||
|
return specsErr
|
||||||
|
}
|
||||||
|
for _, specFile := range specFiles {
|
||||||
|
if specFile.IsDir() {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
r := regexp.MustCompile(`^(.*)_config.yml$`)
|
||||||
|
matches := r.FindAllStringSubmatch(specFile.Name(), -1)
|
||||||
|
if matches == nil {
|
||||||
|
slog.Warn(" skipping file (no regex match)", "file", specFile.Name())
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
srcSpecFile := path.Join(b.rootDir, "service_specs", data.ServiceName, v.Name, specFile.Name())
|
||||||
|
|
||||||
|
if matches[0][0] != specFile.Name() {
|
||||||
|
return fmt.Errorf("matched filename differs from original filename - this should not happen")
|
||||||
|
}
|
||||||
|
resource := matches[0][1]
|
||||||
|
if b.Verbose {
|
||||||
|
slog.Info(
|
||||||
|
" found service spec",
|
||||||
|
"service",
|
||||||
|
data.ServiceName,
|
||||||
|
"resource",
|
||||||
|
resource,
|
||||||
|
"file",
|
||||||
|
specFile.Name(),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
oasFile := path.Join(
|
||||||
|
oasTempDir,
|
||||||
|
"services",
|
||||||
|
data.ServiceName,
|
||||||
|
v.Path,
|
||||||
|
fmt.Sprintf("%s.json", data.ServiceName),
|
||||||
|
)
|
||||||
|
if _, oasErr := os.Stat(oasFile); os.IsNotExist(oasErr) {
|
||||||
|
slog.Warn(
|
||||||
|
" could not find matching oas",
|
||||||
|
"svc",
|
||||||
|
data.ServiceName,
|
||||||
|
"version",
|
||||||
|
v.Name,
|
||||||
|
)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// determine correct target service name
|
||||||
|
scName := fmt.Sprintf("%s%s", data.ServiceName, v.Name)
|
||||||
|
scName = strings.ReplaceAll(scName, "-", "")
|
||||||
|
|
||||||
|
specJSONFile := path.Join(
|
||||||
|
b.rootDir,
|
||||||
|
"generated",
|
||||||
|
"specs",
|
||||||
|
fmt.Sprintf("%s_%s_spec.json", scName, resource),
|
||||||
|
)
|
||||||
|
|
||||||
|
cmdErr := b.runTerraformPluginGenOpenAPI(srcSpecFile, specJSONFile, oasFile)
|
||||||
|
if cmdErr != nil {
|
||||||
|
return cmdErr
|
||||||
|
}
|
||||||
|
|
||||||
|
cmdResGenErr := b.runTerraformPluginGenFramework(ResTypeResource, scName, resource, specJSONFile)
|
||||||
|
if cmdResGenErr != nil {
|
||||||
|
return cmdResGenErr
|
||||||
|
}
|
||||||
|
|
||||||
|
cmdDsGenErr := b.runTerraformPluginGenFramework(ResTypeDataSource, scName, resource, specJSONFile)
|
||||||
|
if cmdDsGenErr != nil {
|
||||||
|
return cmdDsGenErr
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (b *Builder) runTerraformPluginGenFramework(resType, svcName, resource, specJSONFile string) error {
|
||||||
|
var stdOut, stdErr bytes.Buffer
|
||||||
|
tgtFolder := path.Join(
|
||||||
|
b.rootDir,
|
||||||
|
"stackit",
|
||||||
|
"internal",
|
||||||
|
"services",
|
||||||
|
svcName,
|
||||||
|
resource,
|
||||||
|
fmt.Sprintf("%s_gen", resType),
|
||||||
|
)
|
||||||
|
|
||||||
|
//nolint:gosec // this file is not sensitive, so we can use 0755
|
||||||
|
err := os.MkdirAll(tgtFolder, 0o755)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
var subCmd string
|
||||||
|
switch resType {
|
||||||
|
case ResTypeResource:
|
||||||
|
subCmd = "resources"
|
||||||
|
case ResTypeDataSource:
|
||||||
|
subCmd = "data-sources"
|
||||||
|
default:
|
||||||
|
return fmt.Errorf("unknown resource type given: %s", resType)
|
||||||
|
}
|
||||||
|
|
||||||
|
// nolint:gosec // #nosec this command is not using any untrusted input, so we can ignore gosec warning
|
||||||
|
cmd := exec.Command(
|
||||||
|
"tfplugingen-framework",
|
||||||
|
"generate",
|
||||||
|
subCmd,
|
||||||
|
"--input",
|
||||||
|
specJSONFile,
|
||||||
|
"--output",
|
||||||
|
tgtFolder,
|
||||||
|
"--package",
|
||||||
|
svcName,
|
||||||
|
)
|
||||||
|
|
||||||
|
cmd.Stdout = &stdOut
|
||||||
|
cmd.Stderr = &stdErr
|
||||||
|
if err = cmd.Start(); err != nil {
|
||||||
|
slog.Error(fmt.Sprintf("tfplugingen-framework generate %s", resType), "error", err)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
if err = cmd.Wait(); err != nil {
|
||||||
|
var exitErr *exec.ExitError
|
||||||
|
if errors.As(err, &exitErr) {
|
||||||
|
slog.Error(
|
||||||
|
fmt.Sprintf("tfplugingen-framework generate %s", resType),
|
||||||
|
"code",
|
||||||
|
exitErr.ExitCode(),
|
||||||
|
"error",
|
||||||
|
err,
|
||||||
|
"stdout",
|
||||||
|
stdOut.String(),
|
||||||
|
"stderr",
|
||||||
|
stdErr.String(),
|
||||||
|
)
|
||||||
|
return fmt.Errorf("%s", stdErr.String())
|
||||||
|
}
|
||||||
|
if err != nil {
|
||||||
|
slog.Error(
|
||||||
|
fmt.Sprintf("tfplugingen-framework generate %s", resType),
|
||||||
|
"err",
|
||||||
|
err,
|
||||||
|
"stdout",
|
||||||
|
stdOut.String(),
|
||||||
|
"stderr",
|
||||||
|
stdErr.String(),
|
||||||
|
)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if resType == ResTypeDataSource {
|
||||||
|
tfAnoErr := b.handleTfTagForDatasourceFile(
|
||||||
|
path.Join(tgtFolder, fmt.Sprintf("%s_data_source_gen.go", resource)),
|
||||||
|
svcName,
|
||||||
|
resource,
|
||||||
|
)
|
||||||
|
if tfAnoErr != nil {
|
||||||
|
return tfAnoErr
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (b *Builder) runTerraformPluginGenOpenAPI(srcSpecFile, specJSONFile, oasFile string) error {
|
||||||
|
var stdOut, stdErr bytes.Buffer
|
||||||
|
|
||||||
|
// nolint:gosec // #nosec this command is not using any untrusted input, so we can ignore gosec warning
|
||||||
|
cmd := exec.Command(
|
||||||
|
"tfplugingen-openapi",
|
||||||
|
"generate",
|
||||||
|
"--config",
|
||||||
|
srcSpecFile,
|
||||||
|
"--output",
|
||||||
|
specJSONFile,
|
||||||
|
oasFile,
|
||||||
|
)
|
||||||
|
cmd.Stdout = &stdOut
|
||||||
|
cmd.Stderr = &stdErr
|
||||||
|
|
||||||
|
if err := cmd.Start(); err != nil {
|
||||||
|
slog.Error(
|
||||||
|
"tfplugingen-openapi generate",
|
||||||
|
"error",
|
||||||
|
err,
|
||||||
|
"stdOut",
|
||||||
|
stdOut.String(),
|
||||||
|
"stdErr",
|
||||||
|
stdErr.String(),
|
||||||
|
)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := cmd.Wait(); err != nil {
|
||||||
|
var exitErr *exec.ExitError
|
||||||
|
if errors.As(err, &exitErr) {
|
||||||
|
slog.Error(
|
||||||
|
"tfplugingen-openapi generate",
|
||||||
|
"code",
|
||||||
|
exitErr.ExitCode(),
|
||||||
|
"error",
|
||||||
|
err,
|
||||||
|
"stdout",
|
||||||
|
stdOut.String(),
|
||||||
|
"stderr",
|
||||||
|
stdErr.String(),
|
||||||
|
)
|
||||||
|
return fmt.Errorf("%s", stdErr.String())
|
||||||
|
}
|
||||||
|
if err != nil {
|
||||||
|
slog.Error(
|
||||||
|
"tfplugingen-openapi generate",
|
||||||
|
"err",
|
||||||
|
err,
|
||||||
|
"stdout",
|
||||||
|
stdOut.String(),
|
||||||
|
"stderr",
|
||||||
|
stdErr.String(),
|
||||||
|
)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if stdOut.Len() > 0 {
|
||||||
|
slog.Warn(" command output", "stdout", stdOut.String(), "stderr", stdErr.String())
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// handleTfTagForDatasourceFile replaces existing "id" with "stf_original_api_id"
// by rewriting the generated data-source file line by line through
// handleLine (defined elsewhere in this package) into a temporary file
// and then atomically renaming it over the original. A missing file is
// not an error — generation may legitimately have skipped it.
//
// NOTE(review): on the early-return error paths the temp "replace-*"
// file (and the open source file) are leaked; and the Rename failure
// calls log.Fatal instead of returning an error like every other path
// in this function — both look worth fixing, confirm intent first.
func (b *Builder) handleTfTagForDatasourceFile(filePath, service, resource string) error {
	if b.Verbose {
		slog.Info(" handle terraform tag for datasource", "service", service, "resource", resource)
	}
	if !FileExists(filePath) {
		slog.Warn(" could not find file, skipping", "path", filePath)
		return nil
	}
	f, err := os.Open(filePath)
	if err != nil {
		return err
	}

	// Rewrite into a sibling temp file so the rename below stays on
	// the same filesystem.
	tmp, err := os.CreateTemp(b.rootDir, "replace-*")
	if err != nil {
		return err
	}

	sc := bufio.NewScanner(f)
	for sc.Scan() {
		resLine, err := handleLine(sc.Text())
		if err != nil {
			return err
		}
		if _, err := tmp.WriteString(resLine + "\n"); err != nil {
			return err
		}
	}
	if scErr := sc.Err(); scErr != nil {
		return scErr
	}

	if err := tmp.Close(); err != nil {
		return err
	}

	if err := f.Close(); err != nil {
		return err
	}

	//nolint:gosec // path traversal is not a concern here
	if err := os.Rename(tmp.Name(), filePath); err != nil {
		log.Fatal(err)
	}
	return nil
}
|
||||||
29
go.mod
29
go.mod
|
|
@ -2,20 +2,30 @@ module tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stac
|
||||||
|
|
||||||
go 1.25.6
|
go 1.25.6
|
||||||
|
|
||||||
|
replace (
|
||||||
|
github.com/stackitcloud/stackit-sdk-go => ../stackit-sdk-generator/sdk-repo-updated
|
||||||
|
github.com/stackitcloud/stackit-sdk-go/services/postgresflex => ../stackit-sdk-generator/sdk-repo-updated/services/postgresflex
|
||||||
|
)
|
||||||
|
|
||||||
require (
|
require (
|
||||||
|
github.com/SladkyCitron/slogcolor v1.8.0
|
||||||
github.com/golang-jwt/jwt/v5 v5.3.1
|
github.com/golang-jwt/jwt/v5 v5.3.1
|
||||||
github.com/google/go-cmp v0.7.0
|
github.com/google/go-cmp v0.7.0
|
||||||
github.com/google/uuid v1.6.0
|
github.com/google/uuid v1.6.0
|
||||||
|
github.com/hashicorp/terraform-plugin-codegen-openapi v0.3.0
|
||||||
github.com/hashicorp/terraform-plugin-framework v1.18.0
|
github.com/hashicorp/terraform-plugin-framework v1.18.0
|
||||||
github.com/hashicorp/terraform-plugin-framework-validators v0.19.0
|
github.com/hashicorp/terraform-plugin-framework-validators v0.19.0
|
||||||
github.com/hashicorp/terraform-plugin-go v0.30.0
|
github.com/hashicorp/terraform-plugin-go v0.30.0
|
||||||
github.com/hashicorp/terraform-plugin-log v0.10.0
|
github.com/hashicorp/terraform-plugin-log v0.10.0
|
||||||
github.com/hashicorp/terraform-plugin-testing v1.14.0
|
github.com/hashicorp/terraform-plugin-testing v1.14.0
|
||||||
github.com/iancoleman/strcase v0.3.0
|
github.com/iancoleman/strcase v0.3.0
|
||||||
|
github.com/ivanpirog/coloredcobra v1.0.1
|
||||||
github.com/jarcoal/httpmock v1.4.1
|
github.com/jarcoal/httpmock v1.4.1
|
||||||
github.com/joho/godotenv v1.5.1
|
github.com/joho/godotenv v1.5.1
|
||||||
|
github.com/ldez/go-git-cmd-wrapper/v2 v2.9.1
|
||||||
|
github.com/spf13/cobra v1.4.0
|
||||||
github.com/stackitcloud/stackit-sdk-go/core v0.22.0
|
github.com/stackitcloud/stackit-sdk-go/core v0.22.0
|
||||||
github.com/stackitcloud/stackit-sdk-go/services/iaasalpha v0.1.23-alpha
|
github.com/stackitcloud/stackit-sdk-go/services/postgresflex v0.0.0-00010101000000-000000000000
|
||||||
github.com/teambition/rrule-go v1.8.2
|
github.com/teambition/rrule-go v1.8.2
|
||||||
gopkg.in/yaml.v3 v3.0.1
|
gopkg.in/yaml.v3 v3.0.1
|
||||||
)
|
)
|
||||||
|
|
@ -25,10 +35,9 @@ require github.com/hashicorp/go-retryablehttp v0.7.8 // indirect
|
||||||
require (
|
require (
|
||||||
dario.cat/mergo v1.0.1 // indirect
|
dario.cat/mergo v1.0.1 // indirect
|
||||||
github.com/Masterminds/goutils v1.1.1 // indirect
|
github.com/Masterminds/goutils v1.1.1 // indirect
|
||||||
github.com/Masterminds/semver/v3 v3.2.1 // indirect
|
github.com/Masterminds/semver/v3 v3.2.0 // indirect
|
||||||
github.com/Masterminds/sprig/v3 v3.2.3 // indirect
|
github.com/Masterminds/sprig/v3 v3.2.3 // indirect
|
||||||
github.com/ProtonMail/go-crypto v1.3.0 // indirect
|
github.com/ProtonMail/go-crypto v1.3.0 // indirect
|
||||||
github.com/SladkyCitron/slogcolor v1.8.0 // indirect
|
|
||||||
github.com/agext/levenshtein v1.2.3 // indirect
|
github.com/agext/levenshtein v1.2.3 // indirect
|
||||||
github.com/apparentlymart/go-textseg/v15 v15.0.0 // indirect
|
github.com/apparentlymart/go-textseg/v15 v15.0.0 // indirect
|
||||||
github.com/armon/go-radix v1.0.0 // indirect
|
github.com/armon/go-radix v1.0.0 // indirect
|
||||||
|
|
@ -54,19 +63,14 @@ require (
|
||||||
github.com/hashicorp/logutils v1.0.0 // indirect
|
github.com/hashicorp/logutils v1.0.0 // indirect
|
||||||
github.com/hashicorp/terraform-exec v0.25.0 // indirect
|
github.com/hashicorp/terraform-exec v0.25.0 // indirect
|
||||||
github.com/hashicorp/terraform-json v0.27.2 // indirect
|
github.com/hashicorp/terraform-json v0.27.2 // indirect
|
||||||
github.com/hashicorp/terraform-plugin-codegen-framework v0.4.1 // indirect
|
github.com/hashicorp/terraform-plugin-codegen-spec v0.1.0 // indirect
|
||||||
github.com/hashicorp/terraform-plugin-codegen-openapi v0.3.0 // indirect
|
|
||||||
github.com/hashicorp/terraform-plugin-codegen-spec v0.2.0 // indirect
|
|
||||||
github.com/hashicorp/terraform-plugin-sdk/v2 v2.38.2 // indirect
|
github.com/hashicorp/terraform-plugin-sdk/v2 v2.38.2 // indirect
|
||||||
github.com/hashicorp/terraform-registry-address v0.4.0 // indirect
|
github.com/hashicorp/terraform-registry-address v0.4.0 // indirect
|
||||||
github.com/hashicorp/terraform-svchost v0.2.0 // indirect
|
github.com/hashicorp/terraform-svchost v0.2.0 // indirect
|
||||||
github.com/hashicorp/yamux v0.1.2 // indirect
|
github.com/hashicorp/yamux v0.1.2 // indirect
|
||||||
github.com/huandu/xstrings v1.4.0 // indirect
|
github.com/huandu/xstrings v1.4.0 // indirect
|
||||||
github.com/imdario/mergo v0.3.16 // indirect
|
github.com/imdario/mergo v0.3.15 // indirect
|
||||||
github.com/inconshreveable/mousetrap v1.0.0 // indirect
|
github.com/inconshreveable/mousetrap v1.0.0 // indirect
|
||||||
github.com/ivanpirog/coloredcobra v1.0.1 // indirect
|
|
||||||
github.com/kr/text v0.2.0 // indirect
|
|
||||||
github.com/ldez/go-git-cmd-wrapper/v2 v2.9.1 // indirect
|
|
||||||
github.com/mailru/easyjson v0.7.7 // indirect
|
github.com/mailru/easyjson v0.7.7 // indirect
|
||||||
github.com/mattn/go-colorable v0.1.14 // indirect
|
github.com/mattn/go-colorable v0.1.14 // indirect
|
||||||
github.com/mattn/go-isatty v0.0.20 // indirect
|
github.com/mattn/go-isatty v0.0.20 // indirect
|
||||||
|
|
@ -79,8 +83,7 @@ require (
|
||||||
github.com/pb33f/libopenapi v0.15.0 // indirect
|
github.com/pb33f/libopenapi v0.15.0 // indirect
|
||||||
github.com/posener/complete v1.2.3 // indirect
|
github.com/posener/complete v1.2.3 // indirect
|
||||||
github.com/shopspring/decimal v1.3.1 // indirect
|
github.com/shopspring/decimal v1.3.1 // indirect
|
||||||
github.com/spf13/cast v1.5.1 // indirect
|
github.com/spf13/cast v1.5.0 // indirect
|
||||||
github.com/spf13/cobra v1.4.0 // indirect
|
|
||||||
github.com/spf13/pflag v1.0.5 // indirect
|
github.com/spf13/pflag v1.0.5 // indirect
|
||||||
github.com/stretchr/testify v1.11.1 // indirect
|
github.com/stretchr/testify v1.11.1 // indirect
|
||||||
github.com/vmihailenco/msgpack v4.0.4+incompatible // indirect
|
github.com/vmihailenco/msgpack v4.0.4+incompatible // indirect
|
||||||
|
|
@ -88,7 +91,7 @@ require (
|
||||||
github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect
|
github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect
|
||||||
github.com/vmware-labs/yaml-jsonpath v0.3.2 // indirect
|
github.com/vmware-labs/yaml-jsonpath v0.3.2 // indirect
|
||||||
github.com/wk8/go-ordered-map/v2 v2.1.8 // indirect
|
github.com/wk8/go-ordered-map/v2 v2.1.8 // indirect
|
||||||
github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb // indirect
|
github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f // indirect
|
||||||
github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 // indirect
|
github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 // indirect
|
||||||
github.com/xeipuuv/gojsonschema v1.2.0 // indirect
|
github.com/xeipuuv/gojsonschema v1.2.0 // indirect
|
||||||
github.com/zclconf/go-cty v1.17.0 // indirect
|
github.com/zclconf/go-cty v1.17.0 // indirect
|
||||||
|
|
|
||||||
23
go.sum
23
go.sum
|
|
@ -4,8 +4,6 @@ github.com/Masterminds/goutils v1.1.1 h1:5nUrii3FMTL5diU80unEVvNevw1nH4+ZV4DSLVJ
|
||||||
github.com/Masterminds/goutils v1.1.1/go.mod h1:8cTjp+g8YejhMuvIA5y2vz3BpJxksy863GQaJW2MFNU=
|
github.com/Masterminds/goutils v1.1.1/go.mod h1:8cTjp+g8YejhMuvIA5y2vz3BpJxksy863GQaJW2MFNU=
|
||||||
github.com/Masterminds/semver/v3 v3.2.0 h1:3MEsd0SM6jqZojhjLWWeBY+Kcjy9i6MQAeY7YgDP83g=
|
github.com/Masterminds/semver/v3 v3.2.0 h1:3MEsd0SM6jqZojhjLWWeBY+Kcjy9i6MQAeY7YgDP83g=
|
||||||
github.com/Masterminds/semver/v3 v3.2.0/go.mod h1:qvl/7zhW3nngYb5+80sSMF+FG2BjYrf8m9wsX0PNOMQ=
|
github.com/Masterminds/semver/v3 v3.2.0/go.mod h1:qvl/7zhW3nngYb5+80sSMF+FG2BjYrf8m9wsX0PNOMQ=
|
||||||
github.com/Masterminds/semver/v3 v3.2.1 h1:RN9w6+7QoMeJVGyfmbcgs28Br8cvmnucEXnY0rYXWg0=
|
|
||||||
github.com/Masterminds/semver/v3 v3.2.1/go.mod h1:qvl/7zhW3nngYb5+80sSMF+FG2BjYrf8m9wsX0PNOMQ=
|
|
||||||
github.com/Masterminds/sprig/v3 v3.2.3 h1:eL2fZNezLomi0uOLqjQoN6BfsDD+fyLtgbJMAj9n6YA=
|
github.com/Masterminds/sprig/v3 v3.2.3 h1:eL2fZNezLomi0uOLqjQoN6BfsDD+fyLtgbJMAj9n6YA=
|
||||||
github.com/Masterminds/sprig/v3 v3.2.3/go.mod h1:rXcFaZ2zZbLRJv/xSysmlgIM1u11eBaRMhvYXJNkGuM=
|
github.com/Masterminds/sprig/v3 v3.2.3/go.mod h1:rXcFaZ2zZbLRJv/xSysmlgIM1u11eBaRMhvYXJNkGuM=
|
||||||
github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY=
|
github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY=
|
||||||
|
|
@ -37,7 +35,6 @@ github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMn
|
||||||
github.com/cloudflare/circl v1.6.3 h1:9GPOhQGF9MCYUeXyMYlqTR6a5gTrgR/fBLXvUgtVcg8=
|
github.com/cloudflare/circl v1.6.3 h1:9GPOhQGF9MCYUeXyMYlqTR6a5gTrgR/fBLXvUgtVcg8=
|
||||||
github.com/cloudflare/circl v1.6.3/go.mod h1:2eXP6Qfat4O/Yhh8BznvKnJ+uzEoTQ6jVKJRn81BiS4=
|
github.com/cloudflare/circl v1.6.3/go.mod h1:2eXP6Qfat4O/Yhh8BznvKnJ+uzEoTQ6jVKJRn81BiS4=
|
||||||
github.com/cpuguy83/go-md2man/v2 v2.0.1/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
|
github.com/cpuguy83/go-md2man/v2 v2.0.1/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
|
||||||
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
|
|
||||||
github.com/cyphar/filepath-securejoin v0.4.1 h1:JyxxyPEaktOD+GAnqIqTf9A8tHyAG22rowi7HkoSU1s=
|
github.com/cyphar/filepath-securejoin v0.4.1 h1:JyxxyPEaktOD+GAnqIqTf9A8tHyAG22rowi7HkoSU1s=
|
||||||
github.com/cyphar/filepath-securejoin v0.4.1/go.mod h1:Sdj7gXlvMcPZsbhwhQ33GguGLDGQL7h7bg04C/+u9jI=
|
github.com/cyphar/filepath-securejoin v0.4.1/go.mod h1:Sdj7gXlvMcPZsbhwhQ33GguGLDGQL7h7bg04C/+u9jI=
|
||||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||||
|
|
@ -52,7 +49,10 @@ github.com/emirpasic/gods v1.18.1/go.mod h1:8tpGGwCnJ5H4r6BWwaV6OrWmMoPhUl5jm/FM
|
||||||
github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk=
|
github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk=
|
||||||
github.com/fatih/color v1.18.0 h1:S8gINlzdQ840/4pfAwic/ZE0djQEH3wM94VfqLTZcOM=
|
github.com/fatih/color v1.18.0 h1:S8gINlzdQ840/4pfAwic/ZE0djQEH3wM94VfqLTZcOM=
|
||||||
github.com/fatih/color v1.18.0/go.mod h1:4FelSpRwEGDpQ12mAdzqdOukCy4u8WUtOY6lkT/6HfU=
|
github.com/fatih/color v1.18.0/go.mod h1:4FelSpRwEGDpQ12mAdzqdOukCy4u8WUtOY6lkT/6HfU=
|
||||||
|
github.com/frankban/quicktest v1.14.3 h1:FJKSZTDHjyhriyC81FLQ0LY93eSai0ZyR/ZIkd3ZUKE=
|
||||||
|
github.com/frankban/quicktest v1.14.3/go.mod h1:mgiwOwqx65TmIk1wJ6Q7wvnVMocbUorkibMOrVTHZps=
|
||||||
github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
|
github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
|
||||||
|
github.com/fsnotify/fsnotify v1.4.9 h1:hsms1Qyu0jgnwNXIxa+/V/PDsU6CfLf6CNO8H7IWoS4=
|
||||||
github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ=
|
github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ=
|
||||||
github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 h1:+zs/tPmkDkHx3U66DAb0lQFJrpS6731Oaa12ikc+DiI=
|
github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 h1:+zs/tPmkDkHx3U66DAb0lQFJrpS6731Oaa12ikc+DiI=
|
||||||
github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376/go.mod h1:an3vInlBmSxCcxctByoQdvwPiA7DTK7jaaFDBTtu0ic=
|
github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376/go.mod h1:an3vInlBmSxCcxctByoQdvwPiA7DTK7jaaFDBTtu0ic=
|
||||||
|
|
@ -129,14 +129,10 @@ github.com/hashicorp/terraform-exec v0.25.0 h1:Bkt6m3VkJqYh+laFMrWIpy9KHYFITpOyz
|
||||||
github.com/hashicorp/terraform-exec v0.25.0/go.mod h1:dl9IwsCfklDU6I4wq9/StFDp7dNbH/h5AnfS1RmiUl8=
|
github.com/hashicorp/terraform-exec v0.25.0/go.mod h1:dl9IwsCfklDU6I4wq9/StFDp7dNbH/h5AnfS1RmiUl8=
|
||||||
github.com/hashicorp/terraform-json v0.27.2 h1:BwGuzM6iUPqf9JYM/Z4AF1OJ5VVJEEzoKST/tRDBJKU=
|
github.com/hashicorp/terraform-json v0.27.2 h1:BwGuzM6iUPqf9JYM/Z4AF1OJ5VVJEEzoKST/tRDBJKU=
|
||||||
github.com/hashicorp/terraform-json v0.27.2/go.mod h1:GzPLJ1PLdUG5xL6xn1OXWIjteQRT2CNT9o/6A9mi9hE=
|
github.com/hashicorp/terraform-json v0.27.2/go.mod h1:GzPLJ1PLdUG5xL6xn1OXWIjteQRT2CNT9o/6A9mi9hE=
|
||||||
github.com/hashicorp/terraform-plugin-codegen-framework v0.4.1 h1:eaI/3dsu2T5QAXbA+7N+B+UBj20GdtYnsRuYypKh3S4=
|
|
||||||
github.com/hashicorp/terraform-plugin-codegen-framework v0.4.1/go.mod h1:kpYM23L7NtcfaQdWAN0QFkV/lU0w16qJ2ddAPCI4zAg=
|
|
||||||
github.com/hashicorp/terraform-plugin-codegen-openapi v0.3.0 h1:IKpc337XKk50QyQPSxLrHwdqSo1E2XqCMxFkWsZcTvc=
|
github.com/hashicorp/terraform-plugin-codegen-openapi v0.3.0 h1:IKpc337XKk50QyQPSxLrHwdqSo1E2XqCMxFkWsZcTvc=
|
||||||
github.com/hashicorp/terraform-plugin-codegen-openapi v0.3.0/go.mod h1:tT6wl80h7nsMBw+1yZRgJXi+Ys85PUai11weDqysvp4=
|
github.com/hashicorp/terraform-plugin-codegen-openapi v0.3.0/go.mod h1:tT6wl80h7nsMBw+1yZRgJXi+Ys85PUai11weDqysvp4=
|
||||||
github.com/hashicorp/terraform-plugin-codegen-spec v0.1.0 h1:flL5dprli2h54RxewQi6po02am0zXDRq6nsV6c4WQ/I=
|
github.com/hashicorp/terraform-plugin-codegen-spec v0.1.0 h1:flL5dprli2h54RxewQi6po02am0zXDRq6nsV6c4WQ/I=
|
||||||
github.com/hashicorp/terraform-plugin-codegen-spec v0.1.0/go.mod h1:PQn6bDD8UWoAVJoHXqFk2i/RmLbeQBjbiP38i+E+YIw=
|
github.com/hashicorp/terraform-plugin-codegen-spec v0.1.0/go.mod h1:PQn6bDD8UWoAVJoHXqFk2i/RmLbeQBjbiP38i+E+YIw=
|
||||||
github.com/hashicorp/terraform-plugin-codegen-spec v0.2.0 h1:91dQG1A/DxP6vRz9GiytDTrZTXDbhHPvmpYnAyWA/Vw=
|
|
||||||
github.com/hashicorp/terraform-plugin-codegen-spec v0.2.0/go.mod h1:fywrEKpordQypmAjz/HIfm2LuNVmyJ6KDe8XT9GdJxQ=
|
|
||||||
github.com/hashicorp/terraform-plugin-framework v1.18.0 h1:Xy6OfqSTZfAAKXSlJ810lYvuQvYkOpSUoNMQ9l2L1RA=
|
github.com/hashicorp/terraform-plugin-framework v1.18.0 h1:Xy6OfqSTZfAAKXSlJ810lYvuQvYkOpSUoNMQ9l2L1RA=
|
||||||
github.com/hashicorp/terraform-plugin-framework v1.18.0/go.mod h1:eeFIf68PME+kenJeqSrIcpHhYQK0TOyv7ocKdN4Z35E=
|
github.com/hashicorp/terraform-plugin-framework v1.18.0/go.mod h1:eeFIf68PME+kenJeqSrIcpHhYQK0TOyv7ocKdN4Z35E=
|
||||||
github.com/hashicorp/terraform-plugin-framework-validators v0.19.0 h1:Zz3iGgzxe/1XBkooZCewS0nJAaCFPFPHdNJd8FgE4Ow=
|
github.com/hashicorp/terraform-plugin-framework-validators v0.19.0 h1:Zz3iGgzxe/1XBkooZCewS0nJAaCFPFPHdNJd8FgE4Ow=
|
||||||
|
|
@ -165,8 +161,6 @@ github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:
|
||||||
github.com/imdario/mergo v0.3.11/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA=
|
github.com/imdario/mergo v0.3.11/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA=
|
||||||
github.com/imdario/mergo v0.3.15 h1:M8XP7IuFNsqUx6VPK2P9OSmsYsI/YFaGil0uD21V3dM=
|
github.com/imdario/mergo v0.3.15 h1:M8XP7IuFNsqUx6VPK2P9OSmsYsI/YFaGil0uD21V3dM=
|
||||||
github.com/imdario/mergo v0.3.15/go.mod h1:WBLT9ZmE3lPoWsEzCh9LPo3TiwVN+ZKEjmz+hD27ysY=
|
github.com/imdario/mergo v0.3.15/go.mod h1:WBLT9ZmE3lPoWsEzCh9LPo3TiwVN+ZKEjmz+hD27ysY=
|
||||||
github.com/imdario/mergo v0.3.16 h1:wwQJbIsHYGMUyLSPrEq1CT16AhnhNJQ51+4fdHUnCl4=
|
|
||||||
github.com/imdario/mergo v0.3.16/go.mod h1:WBLT9ZmE3lPoWsEzCh9LPo3TiwVN+ZKEjmz+hD27ysY=
|
|
||||||
github.com/inconshreveable/mousetrap v1.0.0 h1:Z8tu5sraLXCXIcARxBp/8cbvlwVa7Z1NHg9XEKhtSvM=
|
github.com/inconshreveable/mousetrap v1.0.0 h1:Z8tu5sraLXCXIcARxBp/8cbvlwVa7Z1NHg9XEKhtSvM=
|
||||||
github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8=
|
github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8=
|
||||||
github.com/ivanpirog/coloredcobra v1.0.1 h1:aURSdEmlR90/tSiWS0dMjdwOvCVUeYLfltLfbgNxrN4=
|
github.com/ivanpirog/coloredcobra v1.0.1 h1:aURSdEmlR90/tSiWS0dMjdwOvCVUeYLfltLfbgNxrN4=
|
||||||
|
|
@ -216,18 +210,21 @@ github.com/mitchellh/reflectwalk v1.0.0/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx
|
||||||
github.com/mitchellh/reflectwalk v1.0.2 h1:G2LzWKi524PWgd3mLHV8Y5k7s6XUvT0Gef6zxSIeXaQ=
|
github.com/mitchellh/reflectwalk v1.0.2 h1:G2LzWKi524PWgd3mLHV8Y5k7s6XUvT0Gef6zxSIeXaQ=
|
||||||
github.com/mitchellh/reflectwalk v1.0.2/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw=
|
github.com/mitchellh/reflectwalk v1.0.2/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw=
|
||||||
github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A=
|
github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A=
|
||||||
|
github.com/nxadm/tail v1.4.8 h1:nPr65rt6Y5JFSKQO7qToXr7pePgD6Gwiw05lkbyAQTE=
|
||||||
github.com/nxadm/tail v1.4.8/go.mod h1:+ncqLTQzXmGhMZNUePPaPqPvBxHAIsmXswZKocGu+AU=
|
github.com/nxadm/tail v1.4.8/go.mod h1:+ncqLTQzXmGhMZNUePPaPqPvBxHAIsmXswZKocGu+AU=
|
||||||
github.com/oklog/run v1.2.0 h1:O8x3yXwah4A73hJdlrwo/2X6J62gE5qTMusH0dvz60E=
|
github.com/oklog/run v1.2.0 h1:O8x3yXwah4A73hJdlrwo/2X6J62gE5qTMusH0dvz60E=
|
||||||
github.com/oklog/run v1.2.0/go.mod h1:mgDbKRSwPhJfesJ4PntqFUbKQRZ50NgmZTSPlFA0YFk=
|
github.com/oklog/run v1.2.0/go.mod h1:mgDbKRSwPhJfesJ4PntqFUbKQRZ50NgmZTSPlFA0YFk=
|
||||||
github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
|
github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
|
||||||
github.com/onsi/ginkgo v1.10.2/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
|
github.com/onsi/ginkgo v1.10.2/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
|
||||||
github.com/onsi/ginkgo v1.12.1/go.mod h1:zj2OWP4+oCPe1qIXoGWkgMRwljMUYCdkwsT2108oapk=
|
github.com/onsi/ginkgo v1.12.1/go.mod h1:zj2OWP4+oCPe1qIXoGWkgMRwljMUYCdkwsT2108oapk=
|
||||||
|
github.com/onsi/ginkgo v1.16.4 h1:29JGrr5oVBm5ulCWet69zQkzWipVXIol6ygQUe/EzNc=
|
||||||
github.com/onsi/ginkgo v1.16.4/go.mod h1:dX+/inL/fNMqNlz0e9LfyB9TswhZpCVdJM/Z6Vvnwo0=
|
github.com/onsi/ginkgo v1.16.4/go.mod h1:dX+/inL/fNMqNlz0e9LfyB9TswhZpCVdJM/Z6Vvnwo0=
|
||||||
github.com/onsi/ginkgo/v2 v2.1.3/go.mod h1:vw5CSIxN1JObi/U8gcbwft7ZxR2dgaR70JSE3/PpL4c=
|
github.com/onsi/ginkgo/v2 v2.1.3/go.mod h1:vw5CSIxN1JObi/U8gcbwft7ZxR2dgaR70JSE3/PpL4c=
|
||||||
github.com/onsi/gomega v1.7.0/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY=
|
github.com/onsi/gomega v1.7.0/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY=
|
||||||
github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY=
|
github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY=
|
||||||
github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo=
|
github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo=
|
||||||
github.com/onsi/gomega v1.17.0/go.mod h1:HnhC7FXeEQY45zxNK3PPoIUhzk/80Xly9PcubAlGdZY=
|
github.com/onsi/gomega v1.17.0/go.mod h1:HnhC7FXeEQY45zxNK3PPoIUhzk/80Xly9PcubAlGdZY=
|
||||||
|
github.com/onsi/gomega v1.19.0 h1:4ieX6qQjPP/BfC3mpsAtIGGlxTWPeA3Inl/7DtXw1tw=
|
||||||
github.com/onsi/gomega v1.19.0/go.mod h1:LY+I3pBVzYsTBU1AnDwOSxaYi9WoWiqgwooUqq9yPro=
|
github.com/onsi/gomega v1.19.0/go.mod h1:LY+I3pBVzYsTBU1AnDwOSxaYi9WoWiqgwooUqq9yPro=
|
||||||
github.com/pb33f/libopenapi v0.15.0 h1:AoBYIY3HXqDDF8O9kcudlqWaRFZZJmgtueE649oHzIw=
|
github.com/pb33f/libopenapi v0.15.0 h1:AoBYIY3HXqDDF8O9kcudlqWaRFZZJmgtueE649oHzIw=
|
||||||
github.com/pb33f/libopenapi v0.15.0/go.mod h1:m+4Pwri31UvcnZjuP8M7TlbR906DXJmMvYsbis234xg=
|
github.com/pb33f/libopenapi v0.15.0/go.mod h1:m+4Pwri31UvcnZjuP8M7TlbR906DXJmMvYsbis234xg=
|
||||||
|
|
@ -252,16 +249,12 @@ github.com/skeema/knownhosts v1.3.1/go.mod h1:r7KTdC8l4uxWRyK2TpQZ/1o5HaSzh06ePQ
|
||||||
github.com/spf13/cast v1.3.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE=
|
github.com/spf13/cast v1.3.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE=
|
||||||
github.com/spf13/cast v1.5.0 h1:rj3WzYc11XZaIZMPKmwP96zkFEnnAmV8s6XbB2aY32w=
|
github.com/spf13/cast v1.5.0 h1:rj3WzYc11XZaIZMPKmwP96zkFEnnAmV8s6XbB2aY32w=
|
||||||
github.com/spf13/cast v1.5.0/go.mod h1:SpXXQ5YoyJw6s3/6cMTQuxvgRl3PCJiyaX9p6b155UU=
|
github.com/spf13/cast v1.5.0/go.mod h1:SpXXQ5YoyJw6s3/6cMTQuxvgRl3PCJiyaX9p6b155UU=
|
||||||
github.com/spf13/cast v1.5.1 h1:R+kOtfhWQE6TVQzY+4D7wJLBgkdVasCEFxSUBYBYIlA=
|
|
||||||
github.com/spf13/cast v1.5.1/go.mod h1:b9PdjNptOpzXr7Rq1q9gJML/2cdGQAo69NKzQ10KN48=
|
|
||||||
github.com/spf13/cobra v1.4.0 h1:y+wJpx64xcgO1V+RcnwW0LEHxTKRi2ZDPSBjWnrg88Q=
|
github.com/spf13/cobra v1.4.0 h1:y+wJpx64xcgO1V+RcnwW0LEHxTKRi2ZDPSBjWnrg88Q=
|
||||||
github.com/spf13/cobra v1.4.0/go.mod h1:Wo4iy3BUC+X2Fybo0PDqwJIv3dNRiZLHQymsfxlB84g=
|
github.com/spf13/cobra v1.4.0/go.mod h1:Wo4iy3BUC+X2Fybo0PDqwJIv3dNRiZLHQymsfxlB84g=
|
||||||
github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
|
github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
|
||||||
github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
|
github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
|
||||||
github.com/stackitcloud/stackit-sdk-go/core v0.22.0 h1:6rViz7GnNwXSh51Lur5xuDzO8EWSZfN9J0HvEkBKq6c=
|
github.com/stackitcloud/stackit-sdk-go/core v0.22.0 h1:6rViz7GnNwXSh51Lur5xuDzO8EWSZfN9J0HvEkBKq6c=
|
||||||
github.com/stackitcloud/stackit-sdk-go/core v0.22.0/go.mod h1:osMglDby4csGZ5sIfhNyYq1bS1TxIdPY88+skE/kkmI=
|
github.com/stackitcloud/stackit-sdk-go/core v0.22.0/go.mod h1:osMglDby4csGZ5sIfhNyYq1bS1TxIdPY88+skE/kkmI=
|
||||||
github.com/stackitcloud/stackit-sdk-go/services/iaasalpha v0.1.23-alpha h1:ugpMOMUZGB0yXsWcfe97F7GCdjlexbjFuGD8ZeyMSts=
|
|
||||||
github.com/stackitcloud/stackit-sdk-go/services/iaasalpha v0.1.23-alpha/go.mod h1:v5VGvTxLcCdJJmblbhqYalt/MFHcElDfYoy15CMhaWs=
|
|
||||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||||
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
|
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
|
||||||
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
|
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
|
||||||
|
|
@ -287,8 +280,6 @@ github.com/xanzy/ssh-agent v0.3.3 h1:+/15pJfg/RsTxqYcX6fHqOXZwwMP+2VyYWJeWM2qQFM
|
||||||
github.com/xanzy/ssh-agent v0.3.3/go.mod h1:6dzNDKs0J9rVPHPhaGCukekBHKqfl+L3KghI1Bc68Uw=
|
github.com/xanzy/ssh-agent v0.3.3/go.mod h1:6dzNDKs0J9rVPHPhaGCukekBHKqfl+L3KghI1Bc68Uw=
|
||||||
github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f h1:J9EGpcZtP0E/raorCMxlFGSTBrsSlaDGf3jU/qvAE2c=
|
github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f h1:J9EGpcZtP0E/raorCMxlFGSTBrsSlaDGf3jU/qvAE2c=
|
||||||
github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU=
|
github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU=
|
||||||
github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb h1:zGWFAtiMcyryUHoUjUJX0/lt1H2+i2Ka2n+D3DImSNo=
|
|
||||||
github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU=
|
|
||||||
github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 h1:EzJWgHovont7NscjpAxXsDA8S8BMYve8Y5+7cuRE7R0=
|
github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 h1:EzJWgHovont7NscjpAxXsDA8S8BMYve8Y5+7cuRE7R0=
|
||||||
github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ=
|
github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ=
|
||||||
github.com/xeipuuv/gojsonschema v1.2.0 h1:LhYJRs+L4fBtjZUfuSZIKGeVu0QRy8e5Xi7D17UxZ74=
|
github.com/xeipuuv/gojsonschema v1.2.0 h1:LhYJRs+L4fBtjZUfuSZIKGeVu0QRy8e5Xi7D17UxZ74=
|
||||||
|
|
@ -415,6 +406,7 @@ gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8
|
||||||
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
|
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
|
||||||
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
|
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
|
||||||
gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys=
|
gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys=
|
||||||
|
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ=
|
||||||
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw=
|
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw=
|
||||||
gopkg.in/warnings.v0 v0.1.2 h1:wFXVbFY8DY5/xOe1ECiWdKCzZlxgshcYVNkBHstARME=
|
gopkg.in/warnings.v0 v0.1.2 h1:wFXVbFY8DY5/xOe1ECiWdKCzZlxgshcYVNkBHstARME=
|
||||||
gopkg.in/warnings.v0 v0.1.2/go.mod h1:jksf8JmL6Qr/oQM2OXTHunEvvTAsrWBLb6OOjuVWRNI=
|
gopkg.in/warnings.v0 v0.1.2/go.mod h1:jksf8JmL6Qr/oQM2OXTHunEvvTAsrWBLb6OOjuVWRNI=
|
||||||
|
|
@ -422,6 +414,7 @@ gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||||
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||||
gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||||
gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||||
|
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
|
||||||
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
|
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
|
||||||
gopkg.in/yaml.v3 v3.0.0-20191026110619-0b21df46bc1d/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
gopkg.in/yaml.v3 v3.0.0-20191026110619-0b21df46bc1d/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||||
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||||
|
|
|
||||||
3
service_specs/postgres-flex/generator_settings.yml
Normal file
3
service_specs/postgres-flex/generator_settings.yml
Normal file
|
|
@ -0,0 +1,3 @@
|
||||||
|
versions:
|
||||||
|
- name: alpha
|
||||||
|
path: v3alpha1
|
||||||
5
service_specs/sqlserverflex/generator_settings.yml
Normal file
5
service_specs/sqlserverflex/generator_settings.yml
Normal file
|
|
@ -0,0 +1,5 @@
|
||||||
|
versions:
|
||||||
|
- name: alpha
|
||||||
|
path: v3alpha1
|
||||||
|
- name: beta
|
||||||
|
path: v3beta1
|
||||||
|
|
@ -10,14 +10,13 @@ import (
|
||||||
"github.com/hashicorp/terraform-plugin-framework/diag"
|
"github.com/hashicorp/terraform-plugin-framework/diag"
|
||||||
"github.com/hashicorp/terraform-plugin-framework/types"
|
"github.com/hashicorp/terraform-plugin-framework/types"
|
||||||
"github.com/hashicorp/terraform-plugin-log/tflog"
|
"github.com/hashicorp/terraform-plugin-log/tflog"
|
||||||
|
"github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
|
||||||
|
|
||||||
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
|
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
|
||||||
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
|
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
|
||||||
postgresflexalpha2 "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/database/datasources_gen"
|
postgresflexalpha2 "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/database/datasources_gen"
|
||||||
postgresflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/utils"
|
postgresflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/utils"
|
||||||
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
|
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
|
||||||
|
|
||||||
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
// Ensure the implementation satisfies the expected interfaces.
|
// Ensure the implementation satisfies the expected interfaces.
|
||||||
|
|
@ -38,7 +37,7 @@ type dataSourceModel struct {
|
||||||
|
|
||||||
// databaseDataSource is the data source implementation.
|
// databaseDataSource is the data source implementation.
|
||||||
type databaseDataSource struct {
|
type databaseDataSource struct {
|
||||||
client *postgresflexalpha.APIClient
|
client *v3alpha1api.APIClient
|
||||||
providerData core.ProviderData
|
providerData core.ProviderData
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -144,7 +143,7 @@ func (r *databaseDataSource) getDatabaseByNameOrID(
|
||||||
model *dataSourceModel,
|
model *dataSourceModel,
|
||||||
projectId, region, instanceId string,
|
projectId, region, instanceId string,
|
||||||
diags *diag.Diagnostics,
|
diags *diag.Diagnostics,
|
||||||
) (*postgresflexalpha.ListDatabase, error) {
|
) (*v3alpha1api.ListDatabase, error) {
|
||||||
isIdSet := !model.DatabaseId.IsNull() && !model.DatabaseId.IsUnknown()
|
isIdSet := !model.DatabaseId.IsNull() && !model.DatabaseId.IsUnknown()
|
||||||
isNameSet := !model.Name.IsNull() && !model.Name.IsUnknown()
|
isNameSet := !model.Name.IsNull() && !model.Name.IsUnknown()
|
||||||
|
|
||||||
|
|
@ -157,14 +156,14 @@ func (r *databaseDataSource) getDatabaseByNameOrID(
|
||||||
}
|
}
|
||||||
|
|
||||||
if isIdSet {
|
if isIdSet {
|
||||||
databaseId := model.DatabaseId.ValueInt64()
|
databaseId := model.DatabaseId.ValueInt32()
|
||||||
ctx = tflog.SetField(ctx, "database_id", databaseId)
|
ctx = tflog.SetField(ctx, "database_id", databaseId)
|
||||||
return getDatabaseById(ctx, r.client, projectId, region, instanceId, databaseId)
|
return getDatabaseById(ctx, r.client.DefaultAPI, projectId, region, instanceId, databaseId)
|
||||||
}
|
}
|
||||||
|
|
||||||
databaseName := model.Name.ValueString()
|
databaseName := model.Name.ValueString()
|
||||||
ctx = tflog.SetField(ctx, "name", databaseName)
|
ctx = tflog.SetField(ctx, "name", databaseName)
|
||||||
return getDatabaseByName(ctx, r.client, projectId, region, instanceId, databaseName)
|
return getDatabaseByName(ctx, r.client.DefaultAPI, projectId, region, instanceId, databaseName)
|
||||||
}
|
}
|
||||||
|
|
||||||
// handleReadError centralizes API error handling for the Read operation.
|
// handleReadError centralizes API error handling for the Read operation.
|
||||||
|
|
|
||||||
|
|
@ -14,12 +14,12 @@ import (
|
||||||
func DatabaseDataSourceSchema(ctx context.Context) schema.Schema {
|
func DatabaseDataSourceSchema(ctx context.Context) schema.Schema {
|
||||||
return schema.Schema{
|
return schema.Schema{
|
||||||
Attributes: map[string]schema.Attribute{
|
Attributes: map[string]schema.Attribute{
|
||||||
"database_id": schema.Int64Attribute{
|
"database_id": schema.Int32Attribute{
|
||||||
Required: true,
|
Required: true,
|
||||||
Description: "The ID of the database.",
|
Description: "The ID of the database.",
|
||||||
MarkdownDescription: "The ID of the database.",
|
MarkdownDescription: "The ID of the database.",
|
||||||
},
|
},
|
||||||
"tf_original_api_id": schema.Int64Attribute{
|
"tf_original_api_id": schema.Int32Attribute{
|
||||||
Computed: true,
|
Computed: true,
|
||||||
Description: "The id of the database.",
|
Description: "The id of the database.",
|
||||||
MarkdownDescription: "The id of the database.",
|
MarkdownDescription: "The id of the database.",
|
||||||
|
|
@ -59,8 +59,8 @@ func DatabaseDataSourceSchema(ctx context.Context) schema.Schema {
|
||||||
}
|
}
|
||||||
|
|
||||||
type DatabaseModel struct {
|
type DatabaseModel struct {
|
||||||
DatabaseId types.Int64 `tfsdk:"database_id"`
|
DatabaseId types.Int32 `tfsdk:"database_id"`
|
||||||
Id types.Int64 `tfsdk:"tf_original_api_id"`
|
Id types.Int32 `tfsdk:"tf_original_api_id"`
|
||||||
InstanceId types.String `tfsdk:"instance_id"`
|
InstanceId types.String `tfsdk:"instance_id"`
|
||||||
Name types.String `tfsdk:"name"`
|
Name types.String `tfsdk:"name"`
|
||||||
Owner types.String `tfsdk:"owner"`
|
Owner types.String `tfsdk:"owner"`
|
||||||
|
|
|
||||||
|
|
@ -23,7 +23,7 @@ func DatabasesDataSourceSchema(ctx context.Context) schema.Schema {
|
||||||
"databases": schema.ListNestedAttribute{
|
"databases": schema.ListNestedAttribute{
|
||||||
NestedObject: schema.NestedAttributeObject{
|
NestedObject: schema.NestedAttributeObject{
|
||||||
Attributes: map[string]schema.Attribute{
|
Attributes: map[string]schema.Attribute{
|
||||||
"id": schema.Int64Attribute{
|
"id": schema.Int32Attribute{
|
||||||
Computed: true,
|
Computed: true,
|
||||||
Description: "The id of the database.",
|
Description: "The id of the database.",
|
||||||
MarkdownDescription: "The id of the database.",
|
MarkdownDescription: "The id of the database.",
|
||||||
|
|
@ -54,7 +54,7 @@ func DatabasesDataSourceSchema(ctx context.Context) schema.Schema {
|
||||||
Description: "The ID of the instance.",
|
Description: "The ID of the instance.",
|
||||||
MarkdownDescription: "The ID of the instance.",
|
MarkdownDescription: "The ID of the instance.",
|
||||||
},
|
},
|
||||||
"page": schema.Int64Attribute{
|
"page": schema.Int32Attribute{
|
||||||
Optional: true,
|
Optional: true,
|
||||||
Computed: true,
|
Computed: true,
|
||||||
Description: "Number of the page of items list to be returned.",
|
Description: "Number of the page of items list to be returned.",
|
||||||
|
|
@ -62,19 +62,19 @@ func DatabasesDataSourceSchema(ctx context.Context) schema.Schema {
|
||||||
},
|
},
|
||||||
"pagination": schema.SingleNestedAttribute{
|
"pagination": schema.SingleNestedAttribute{
|
||||||
Attributes: map[string]schema.Attribute{
|
Attributes: map[string]schema.Attribute{
|
||||||
"page": schema.Int64Attribute{
|
"page": schema.Int32Attribute{
|
||||||
Computed: true,
|
Computed: true,
|
||||||
},
|
},
|
||||||
"size": schema.Int64Attribute{
|
"size": schema.Int32Attribute{
|
||||||
Computed: true,
|
Computed: true,
|
||||||
},
|
},
|
||||||
"sort": schema.StringAttribute{
|
"sort": schema.StringAttribute{
|
||||||
Computed: true,
|
Computed: true,
|
||||||
},
|
},
|
||||||
"total_pages": schema.Int64Attribute{
|
"total_pages": schema.Int32Attribute{
|
||||||
Computed: true,
|
Computed: true,
|
||||||
},
|
},
|
||||||
"total_rows": schema.Int64Attribute{
|
"total_rows": schema.Int32Attribute{
|
||||||
Computed: true,
|
Computed: true,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
|
@ -100,7 +100,7 @@ func DatabasesDataSourceSchema(ctx context.Context) schema.Schema {
|
||||||
),
|
),
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
"size": schema.Int64Attribute{
|
"size": schema.Int32Attribute{
|
||||||
Optional: true,
|
Optional: true,
|
||||||
Computed: true,
|
Computed: true,
|
||||||
Description: "Number of items to be returned on each page.",
|
Description: "Number of items to be returned on each page.",
|
||||||
|
|
@ -131,11 +131,11 @@ func DatabasesDataSourceSchema(ctx context.Context) schema.Schema {
|
||||||
type DatabasesModel struct {
|
type DatabasesModel struct {
|
||||||
Databases types.List `tfsdk:"databases"`
|
Databases types.List `tfsdk:"databases"`
|
||||||
InstanceId types.String `tfsdk:"instance_id"`
|
InstanceId types.String `tfsdk:"instance_id"`
|
||||||
Page types.Int64 `tfsdk:"page"`
|
Page types.Int32 `tfsdk:"page"`
|
||||||
Pagination PaginationValue `tfsdk:"pagination"`
|
Pagination PaginationValue `tfsdk:"pagination"`
|
||||||
ProjectId types.String `tfsdk:"project_id"`
|
ProjectId types.String `tfsdk:"project_id"`
|
||||||
Region types.String `tfsdk:"region"`
|
Region types.String `tfsdk:"region"`
|
||||||
Size types.Int64 `tfsdk:"size"`
|
Size types.Int32 `tfsdk:"size"`
|
||||||
Sort types.String `tfsdk:"sort"`
|
Sort types.String `tfsdk:"sort"`
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -174,12 +174,12 @@ func (t DatabasesType) ValueFromObject(ctx context.Context, in basetypes.ObjectV
|
||||||
return nil, diags
|
return nil, diags
|
||||||
}
|
}
|
||||||
|
|
||||||
idVal, ok := idAttribute.(basetypes.Int64Value)
|
idVal, ok := idAttribute.(basetypes.Int32Value)
|
||||||
|
|
||||||
if !ok {
|
if !ok {
|
||||||
diags.AddError(
|
diags.AddError(
|
||||||
"Attribute Wrong Type",
|
"Attribute Wrong Type",
|
||||||
fmt.Sprintf(`id expected to be basetypes.Int64Value, was: %T`, idAttribute))
|
fmt.Sprintf(`id expected to be basetypes.Int32Value, was: %T`, idAttribute))
|
||||||
}
|
}
|
||||||
|
|
||||||
nameAttribute, ok := attributes["name"]
|
nameAttribute, ok := attributes["name"]
|
||||||
|
|
@ -303,12 +303,12 @@ func NewDatabasesValue(attributeTypes map[string]attr.Type, attributes map[strin
|
||||||
return NewDatabasesValueUnknown(), diags
|
return NewDatabasesValueUnknown(), diags
|
||||||
}
|
}
|
||||||
|
|
||||||
idVal, ok := idAttribute.(basetypes.Int64Value)
|
idVal, ok := idAttribute.(basetypes.Int32Value)
|
||||||
|
|
||||||
if !ok {
|
if !ok {
|
||||||
diags.AddError(
|
diags.AddError(
|
||||||
"Attribute Wrong Type",
|
"Attribute Wrong Type",
|
||||||
fmt.Sprintf(`id expected to be basetypes.Int64Value, was: %T`, idAttribute))
|
fmt.Sprintf(`id expected to be basetypes.Int32Value, was: %T`, idAttribute))
|
||||||
}
|
}
|
||||||
|
|
||||||
nameAttribute, ok := attributes["name"]
|
nameAttribute, ok := attributes["name"]
|
||||||
|
|
@ -427,7 +427,7 @@ func (t DatabasesType) ValueType(ctx context.Context) attr.Value {
|
||||||
var _ basetypes.ObjectValuable = DatabasesValue{}
|
var _ basetypes.ObjectValuable = DatabasesValue{}
|
||||||
|
|
||||||
type DatabasesValue struct {
|
type DatabasesValue struct {
|
||||||
Id basetypes.Int64Value `tfsdk:"id"`
|
Id basetypes.Int32Value `tfsdk:"id"`
|
||||||
Name basetypes.StringValue `tfsdk:"name"`
|
Name basetypes.StringValue `tfsdk:"name"`
|
||||||
Owner basetypes.StringValue `tfsdk:"owner"`
|
Owner basetypes.StringValue `tfsdk:"owner"`
|
||||||
state attr.ValueState
|
state attr.ValueState
|
||||||
|
|
@ -439,7 +439,7 @@ func (v DatabasesValue) ToTerraformValue(ctx context.Context) (tftypes.Value, er
|
||||||
var val tftypes.Value
|
var val tftypes.Value
|
||||||
var err error
|
var err error
|
||||||
|
|
||||||
attrTypes["id"] = basetypes.Int64Type{}.TerraformType(ctx)
|
attrTypes["id"] = basetypes.Int32Type{}.TerraformType(ctx)
|
||||||
attrTypes["name"] = basetypes.StringType{}.TerraformType(ctx)
|
attrTypes["name"] = basetypes.StringType{}.TerraformType(ctx)
|
||||||
attrTypes["owner"] = basetypes.StringType{}.TerraformType(ctx)
|
attrTypes["owner"] = basetypes.StringType{}.TerraformType(ctx)
|
||||||
|
|
||||||
|
|
@ -503,7 +503,7 @@ func (v DatabasesValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValu
|
||||||
var diags diag.Diagnostics
|
var diags diag.Diagnostics
|
||||||
|
|
||||||
attributeTypes := map[string]attr.Type{
|
attributeTypes := map[string]attr.Type{
|
||||||
"id": basetypes.Int64Type{},
|
"id": basetypes.Int32Type{},
|
||||||
"name": basetypes.StringType{},
|
"name": basetypes.StringType{},
|
||||||
"owner": basetypes.StringType{},
|
"owner": basetypes.StringType{},
|
||||||
}
|
}
|
||||||
|
|
@ -567,7 +567,7 @@ func (v DatabasesValue) Type(ctx context.Context) attr.Type {
|
||||||
|
|
||||||
func (v DatabasesValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
|
func (v DatabasesValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
|
||||||
return map[string]attr.Type{
|
return map[string]attr.Type{
|
||||||
"id": basetypes.Int64Type{},
|
"id": basetypes.Int32Type{},
|
||||||
"name": basetypes.StringType{},
|
"name": basetypes.StringType{},
|
||||||
"owner": basetypes.StringType{},
|
"owner": basetypes.StringType{},
|
||||||
}
|
}
|
||||||
|
|
@ -608,12 +608,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
|
||||||
return nil, diags
|
return nil, diags
|
||||||
}
|
}
|
||||||
|
|
||||||
pageVal, ok := pageAttribute.(basetypes.Int64Value)
|
pageVal, ok := pageAttribute.(basetypes.Int32Value)
|
||||||
|
|
||||||
if !ok {
|
if !ok {
|
||||||
diags.AddError(
|
diags.AddError(
|
||||||
"Attribute Wrong Type",
|
"Attribute Wrong Type",
|
||||||
fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute))
|
fmt.Sprintf(`page expected to be basetypes.Int32Value, was: %T`, pageAttribute))
|
||||||
}
|
}
|
||||||
|
|
||||||
sizeAttribute, ok := attributes["size"]
|
sizeAttribute, ok := attributes["size"]
|
||||||
|
|
@ -626,12 +626,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
|
||||||
return nil, diags
|
return nil, diags
|
||||||
}
|
}
|
||||||
|
|
||||||
sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
|
sizeVal, ok := sizeAttribute.(basetypes.Int32Value)
|
||||||
|
|
||||||
if !ok {
|
if !ok {
|
||||||
diags.AddError(
|
diags.AddError(
|
||||||
"Attribute Wrong Type",
|
"Attribute Wrong Type",
|
||||||
fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
|
fmt.Sprintf(`size expected to be basetypes.Int32Value, was: %T`, sizeAttribute))
|
||||||
}
|
}
|
||||||
|
|
||||||
sortAttribute, ok := attributes["sort"]
|
sortAttribute, ok := attributes["sort"]
|
||||||
|
|
@ -662,12 +662,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
|
||||||
return nil, diags
|
return nil, diags
|
||||||
}
|
}
|
||||||
|
|
||||||
totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value)
|
totalPagesVal, ok := totalPagesAttribute.(basetypes.Int32Value)
|
||||||
|
|
||||||
if !ok {
|
if !ok {
|
||||||
diags.AddError(
|
diags.AddError(
|
||||||
"Attribute Wrong Type",
|
"Attribute Wrong Type",
|
||||||
fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute))
|
fmt.Sprintf(`total_pages expected to be basetypes.Int32Value, was: %T`, totalPagesAttribute))
|
||||||
}
|
}
|
||||||
|
|
||||||
totalRowsAttribute, ok := attributes["total_rows"]
|
totalRowsAttribute, ok := attributes["total_rows"]
|
||||||
|
|
@ -680,12 +680,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
|
||||||
return nil, diags
|
return nil, diags
|
||||||
}
|
}
|
||||||
|
|
||||||
totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value)
|
totalRowsVal, ok := totalRowsAttribute.(basetypes.Int32Value)
|
||||||
|
|
||||||
if !ok {
|
if !ok {
|
||||||
diags.AddError(
|
diags.AddError(
|
||||||
"Attribute Wrong Type",
|
"Attribute Wrong Type",
|
||||||
fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute))
|
fmt.Sprintf(`total_rows expected to be basetypes.Int32Value, was: %T`, totalRowsAttribute))
|
||||||
}
|
}
|
||||||
|
|
||||||
if diags.HasError() {
|
if diags.HasError() {
|
||||||
|
|
@ -775,12 +775,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
|
||||||
return NewPaginationValueUnknown(), diags
|
return NewPaginationValueUnknown(), diags
|
||||||
}
|
}
|
||||||
|
|
||||||
pageVal, ok := pageAttribute.(basetypes.Int64Value)
|
pageVal, ok := pageAttribute.(basetypes.Int32Value)
|
||||||
|
|
||||||
if !ok {
|
if !ok {
|
||||||
diags.AddError(
|
diags.AddError(
|
||||||
"Attribute Wrong Type",
|
"Attribute Wrong Type",
|
||||||
fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute))
|
fmt.Sprintf(`page expected to be basetypes.Int32Value, was: %T`, pageAttribute))
|
||||||
}
|
}
|
||||||
|
|
||||||
sizeAttribute, ok := attributes["size"]
|
sizeAttribute, ok := attributes["size"]
|
||||||
|
|
@ -793,12 +793,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
|
||||||
return NewPaginationValueUnknown(), diags
|
return NewPaginationValueUnknown(), diags
|
||||||
}
|
}
|
||||||
|
|
||||||
sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
|
sizeVal, ok := sizeAttribute.(basetypes.Int32Value)
|
||||||
|
|
||||||
if !ok {
|
if !ok {
|
||||||
diags.AddError(
|
diags.AddError(
|
||||||
"Attribute Wrong Type",
|
"Attribute Wrong Type",
|
||||||
fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
|
fmt.Sprintf(`size expected to be basetypes.Int32Value, was: %T`, sizeAttribute))
|
||||||
}
|
}
|
||||||
|
|
||||||
sortAttribute, ok := attributes["sort"]
|
sortAttribute, ok := attributes["sort"]
|
||||||
|
|
@ -829,12 +829,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
|
||||||
return NewPaginationValueUnknown(), diags
|
return NewPaginationValueUnknown(), diags
|
||||||
}
|
}
|
||||||
|
|
||||||
totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value)
|
totalPagesVal, ok := totalPagesAttribute.(basetypes.Int32Value)
|
||||||
|
|
||||||
if !ok {
|
if !ok {
|
||||||
diags.AddError(
|
diags.AddError(
|
||||||
"Attribute Wrong Type",
|
"Attribute Wrong Type",
|
||||||
fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute))
|
fmt.Sprintf(`total_pages expected to be basetypes.Int32Value, was: %T`, totalPagesAttribute))
|
||||||
}
|
}
|
||||||
|
|
||||||
totalRowsAttribute, ok := attributes["total_rows"]
|
totalRowsAttribute, ok := attributes["total_rows"]
|
||||||
|
|
@ -847,12 +847,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
|
||||||
return NewPaginationValueUnknown(), diags
|
return NewPaginationValueUnknown(), diags
|
||||||
}
|
}
|
||||||
|
|
||||||
totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value)
|
totalRowsVal, ok := totalRowsAttribute.(basetypes.Int32Value)
|
||||||
|
|
||||||
if !ok {
|
if !ok {
|
||||||
diags.AddError(
|
diags.AddError(
|
||||||
"Attribute Wrong Type",
|
"Attribute Wrong Type",
|
||||||
fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute))
|
fmt.Sprintf(`total_rows expected to be basetypes.Int32Value, was: %T`, totalRowsAttribute))
|
||||||
}
|
}
|
||||||
|
|
||||||
if diags.HasError() {
|
if diags.HasError() {
|
||||||
|
|
@ -937,11 +937,11 @@ func (t PaginationType) ValueType(ctx context.Context) attr.Value {
|
||||||
var _ basetypes.ObjectValuable = PaginationValue{}
|
var _ basetypes.ObjectValuable = PaginationValue{}
|
||||||
|
|
||||||
type PaginationValue struct {
|
type PaginationValue struct {
|
||||||
Page basetypes.Int64Value `tfsdk:"page"`
|
Page basetypes.Int32Value `tfsdk:"page"`
|
||||||
Size basetypes.Int64Value `tfsdk:"size"`
|
Size basetypes.Int32Value `tfsdk:"size"`
|
||||||
Sort basetypes.StringValue `tfsdk:"sort"`
|
Sort basetypes.StringValue `tfsdk:"sort"`
|
||||||
TotalPages basetypes.Int64Value `tfsdk:"total_pages"`
|
TotalPages basetypes.Int32Value `tfsdk:"total_pages"`
|
||||||
TotalRows basetypes.Int64Value `tfsdk:"total_rows"`
|
TotalRows basetypes.Int32Value `tfsdk:"total_rows"`
|
||||||
state attr.ValueState
|
state attr.ValueState
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -951,11 +951,11 @@ func (v PaginationValue) ToTerraformValue(ctx context.Context) (tftypes.Value, e
|
||||||
var val tftypes.Value
|
var val tftypes.Value
|
||||||
var err error
|
var err error
|
||||||
|
|
||||||
attrTypes["page"] = basetypes.Int64Type{}.TerraformType(ctx)
|
attrTypes["page"] = basetypes.Int32Type{}.TerraformType(ctx)
|
||||||
attrTypes["size"] = basetypes.Int64Type{}.TerraformType(ctx)
|
attrTypes["size"] = basetypes.Int32Type{}.TerraformType(ctx)
|
||||||
attrTypes["sort"] = basetypes.StringType{}.TerraformType(ctx)
|
attrTypes["sort"] = basetypes.StringType{}.TerraformType(ctx)
|
||||||
attrTypes["total_pages"] = basetypes.Int64Type{}.TerraformType(ctx)
|
attrTypes["total_pages"] = basetypes.Int32Type{}.TerraformType(ctx)
|
||||||
attrTypes["total_rows"] = basetypes.Int64Type{}.TerraformType(ctx)
|
attrTypes["total_rows"] = basetypes.Int32Type{}.TerraformType(ctx)
|
||||||
|
|
||||||
objectType := tftypes.Object{AttributeTypes: attrTypes}
|
objectType := tftypes.Object{AttributeTypes: attrTypes}
|
||||||
|
|
||||||
|
|
@ -1033,11 +1033,11 @@ func (v PaginationValue) ToObjectValue(ctx context.Context) (basetypes.ObjectVal
|
||||||
var diags diag.Diagnostics
|
var diags diag.Diagnostics
|
||||||
|
|
||||||
attributeTypes := map[string]attr.Type{
|
attributeTypes := map[string]attr.Type{
|
||||||
"page": basetypes.Int64Type{},
|
"page": basetypes.Int32Type{},
|
||||||
"size": basetypes.Int64Type{},
|
"size": basetypes.Int32Type{},
|
||||||
"sort": basetypes.StringType{},
|
"sort": basetypes.StringType{},
|
||||||
"total_pages": basetypes.Int64Type{},
|
"total_pages": basetypes.Int32Type{},
|
||||||
"total_rows": basetypes.Int64Type{},
|
"total_rows": basetypes.Int32Type{},
|
||||||
}
|
}
|
||||||
|
|
||||||
if v.IsNull() {
|
if v.IsNull() {
|
||||||
|
|
@ -1109,10 +1109,10 @@ func (v PaginationValue) Type(ctx context.Context) attr.Type {
|
||||||
|
|
||||||
func (v PaginationValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
|
func (v PaginationValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
|
||||||
return map[string]attr.Type{
|
return map[string]attr.Type{
|
||||||
"page": basetypes.Int64Type{},
|
"page": basetypes.Int32Type{},
|
||||||
"size": basetypes.Int64Type{},
|
"size": basetypes.Int32Type{},
|
||||||
"sort": basetypes.StringType{},
|
"sort": basetypes.StringType{},
|
||||||
"total_pages": basetypes.Int64Type{},
|
"total_pages": basetypes.Int32Type{},
|
||||||
"total_rows": basetypes.Int64Type{},
|
"total_rows": basetypes.Int32Type{},
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -5,7 +5,7 @@ import (
|
||||||
"fmt"
|
"fmt"
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
|
"github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
|
||||||
)
|
)
|
||||||
|
|
||||||
// databaseClientReader represents the contract to listing databases from postgresflex.APIClient.
|
// databaseClientReader represents the contract to listing databases from postgresflex.APIClient.
|
||||||
|
|
@ -15,7 +15,7 @@ type databaseClientReader interface {
|
||||||
projectId string,
|
projectId string,
|
||||||
region string,
|
region string,
|
||||||
instanceId string,
|
instanceId string,
|
||||||
) postgresflex.ApiListDatabasesRequestRequest
|
) v3alpha1api.ApiListDatabasesRequestRequest
|
||||||
}
|
}
|
||||||
|
|
||||||
// getDatabaseById gets a database by its ID.
|
// getDatabaseById gets a database by its ID.
|
||||||
|
|
@ -23,10 +23,10 @@ func getDatabaseById(
|
||||||
ctx context.Context,
|
ctx context.Context,
|
||||||
client databaseClientReader,
|
client databaseClientReader,
|
||||||
projectId, region, instanceId string,
|
projectId, region, instanceId string,
|
||||||
databaseId int64,
|
databaseId int32,
|
||||||
) (*postgresflex.ListDatabase, error) {
|
) (*v3alpha1api.ListDatabase, error) {
|
||||||
filter := func(db postgresflex.ListDatabase) bool {
|
filter := func(db v3alpha1api.ListDatabase) bool {
|
||||||
return db.Id != nil && *db.Id == databaseId
|
return db.Id == databaseId
|
||||||
}
|
}
|
||||||
return getDatabase(ctx, client, projectId, region, instanceId, filter)
|
return getDatabase(ctx, client, projectId, region, instanceId, filter)
|
||||||
}
|
}
|
||||||
|
|
@ -36,9 +36,9 @@ func getDatabaseByName(
|
||||||
ctx context.Context,
|
ctx context.Context,
|
||||||
client databaseClientReader,
|
client databaseClientReader,
|
||||||
projectId, region, instanceId, databaseName string,
|
projectId, region, instanceId, databaseName string,
|
||||||
) (*postgresflex.ListDatabase, error) {
|
) (*v3alpha1api.ListDatabase, error) {
|
||||||
filter := func(db postgresflex.ListDatabase) bool {
|
filter := func(db v3alpha1api.ListDatabase) bool {
|
||||||
return db.Name != nil && *db.Name == databaseName
|
return db.Name == databaseName
|
||||||
}
|
}
|
||||||
return getDatabase(ctx, client, projectId, region, instanceId, filter)
|
return getDatabase(ctx, client, projectId, region, instanceId, filter)
|
||||||
}
|
}
|
||||||
|
|
@ -49,8 +49,8 @@ func getDatabase(
|
||||||
ctx context.Context,
|
ctx context.Context,
|
||||||
client databaseClientReader,
|
client databaseClientReader,
|
||||||
projectId, region, instanceId string,
|
projectId, region, instanceId string,
|
||||||
filter func(db postgresflex.ListDatabase) bool,
|
filter func(db v3alpha1api.ListDatabase) bool,
|
||||||
) (*postgresflex.ListDatabase, error) {
|
) (*v3alpha1api.ListDatabase, error) {
|
||||||
if projectId == "" || region == "" || instanceId == "" {
|
if projectId == "" || region == "" || instanceId == "" {
|
||||||
return nil, fmt.Errorf("all parameters (project, region, instance) are required")
|
return nil, fmt.Errorf("all parameters (project, region, instance) are required")
|
||||||
}
|
}
|
||||||
|
|
@ -59,18 +59,18 @@ func getDatabase(
|
||||||
|
|
||||||
for page := int32(1); ; page++ {
|
for page := int32(1); ; page++ {
|
||||||
res, err := client.ListDatabasesRequest(ctx, projectId, region, instanceId).
|
res, err := client.ListDatabasesRequest(ctx, projectId, region, instanceId).
|
||||||
Page(page).Size(pageSize).Sort(postgresflex.DATABASESORT_DATABASE_ID_ASC).Execute()
|
Page(page).Size(pageSize).Sort(v3alpha1api.DATABASESORT_DATABASE_ID_ASC).Execute()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, fmt.Errorf("requesting database list (page %d): %w", page, err)
|
return nil, fmt.Errorf("requesting database list (page %d): %w", page, err)
|
||||||
}
|
}
|
||||||
|
|
||||||
// If the API returns no databases, we have reached the end of the list.
|
// If the API returns no databases, we have reached the end of the list.
|
||||||
if res.Databases == nil || len(*res.Databases) == 0 {
|
if res.Databases == nil || len(res.Databases) == 0 {
|
||||||
break
|
break
|
||||||
}
|
}
|
||||||
|
|
||||||
// Iterate over databases to find a match
|
// Iterate over databases to find a match
|
||||||
for _, db := range *res.Databases {
|
for _, db := range res.Databases {
|
||||||
if filter(db) {
|
if filter(db) {
|
||||||
foundDb := db
|
foundDb := db
|
||||||
return &foundDb, nil
|
return &foundDb, nil
|
||||||
|
|
@ -82,10 +82,6 @@ func getDatabase(
|
||||||
}
|
}
|
||||||
|
|
||||||
// cleanString removes leading and trailing quotes which are sometimes returned by the API.
|
// cleanString removes leading and trailing quotes which are sometimes returned by the API.
|
||||||
func cleanString(s *string) *string {
|
func cleanString(s string) string {
|
||||||
if s == nil {
|
return strings.Trim(s, "\"")
|
||||||
return nil
|
|
||||||
}
|
|
||||||
res := strings.Trim(*s, "\"")
|
|
||||||
return &res
|
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -7,7 +7,7 @@ import (
|
||||||
"github.com/google/go-cmp/cmp"
|
"github.com/google/go-cmp/cmp"
|
||||||
"github.com/stackitcloud/stackit-sdk-go/core/utils"
|
"github.com/stackitcloud/stackit-sdk-go/core/utils"
|
||||||
|
|
||||||
postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
|
"github.com/stackitcloud/stackit-sdk-go/services/postgresflex"
|
||||||
)
|
)
|
||||||
|
|
||||||
type mockRequest struct {
|
type mockRequest struct {
|
||||||
|
|
@ -37,28 +37,28 @@ func (m *mockDBClient) ListDatabasesRequest(
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestGetDatabase(t *testing.T) {
|
func TestGetDatabase(t *testing.T) {
|
||||||
mockResp := func(page int64) (*postgresflex.ListDatabasesResponse, error) {
|
mockResp := func(page int32) (*postgresflex.ListDatabasesResponse, error) {
|
||||||
if page == 1 {
|
if page == 1 {
|
||||||
return &postgresflex.ListDatabasesResponse{
|
return &postgresflex.ListDatabasesResponse{
|
||||||
Databases: &[]postgresflex.ListDatabase{
|
Databases: &[]postgresflex.ListDatabase{
|
||||||
{Id: utils.Ptr(int64(1)), Name: utils.Ptr("first")},
|
{Id: utils.Ptr(int32(1)), Name: utils.Ptr("first")},
|
||||||
{Id: utils.Ptr(int64(2)), Name: utils.Ptr("second")},
|
{Id: utils.Ptr(int32(2)), Name: utils.Ptr("second")},
|
||||||
},
|
},
|
||||||
Pagination: &postgresflex.Pagination{
|
Pagination: &postgresflex.Pagination{
|
||||||
Page: utils.Ptr(int64(1)),
|
Page: utils.Ptr(int32(1)),
|
||||||
TotalPages: utils.Ptr(int64(2)),
|
TotalPages: utils.Ptr(int32(2)),
|
||||||
Size: utils.Ptr(int64(3)),
|
Size: utils.Ptr(int32(3)),
|
||||||
},
|
},
|
||||||
}, nil
|
}, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
if page == 2 {
|
if page == 2 {
|
||||||
return &postgresflex.ListDatabasesResponse{
|
return &postgresflex.ListDatabasesResponse{
|
||||||
Databases: &[]postgresflex.ListDatabase{{Id: utils.Ptr(int64(3)), Name: utils.Ptr("three")}},
|
Databases: &[]postgresflex.ListDatabase{{Id: utils.Ptr(int32(3)), Name: utils.Ptr("three")}},
|
||||||
Pagination: &postgresflex.Pagination{
|
Pagination: &postgresflex.Pagination{
|
||||||
Page: utils.Ptr(int64(2)),
|
Page: utils.Ptr(int32(2)),
|
||||||
TotalPages: utils.Ptr(int64(2)),
|
TotalPages: utils.Ptr(int32(2)),
|
||||||
Size: utils.Ptr(int64(3)),
|
Size: utils.Ptr(int32(3)),
|
||||||
},
|
},
|
||||||
}, nil
|
}, nil
|
||||||
}
|
}
|
||||||
|
|
@ -66,9 +66,9 @@ func TestGetDatabase(t *testing.T) {
|
||||||
return &postgresflex.ListDatabasesResponse{
|
return &postgresflex.ListDatabasesResponse{
|
||||||
Databases: &[]postgresflex.ListDatabase{},
|
Databases: &[]postgresflex.ListDatabase{},
|
||||||
Pagination: &postgresflex.Pagination{
|
Pagination: &postgresflex.Pagination{
|
||||||
Page: utils.Ptr(int64(3)),
|
Page: utils.Ptr(int32(3)),
|
||||||
TotalPages: utils.Ptr(int64(2)),
|
TotalPages: utils.Ptr(int32(2)),
|
||||||
Size: utils.Ptr(int64(3)),
|
Size: utils.Ptr(int32(3)),
|
||||||
},
|
},
|
||||||
}, nil
|
}, nil
|
||||||
}
|
}
|
||||||
|
|
@ -80,7 +80,7 @@ func TestGetDatabase(t *testing.T) {
|
||||||
instanceId string
|
instanceId string
|
||||||
wantErr bool
|
wantErr bool
|
||||||
wantDbName string
|
wantDbName string
|
||||||
wantDbId int64
|
wantDbId int32
|
||||||
}{
|
}{
|
||||||
{
|
{
|
||||||
description: "Success - Found by name on first page",
|
description: "Success - Found by name on first page",
|
||||||
|
|
@ -133,7 +133,7 @@ func TestGetDatabase(t *testing.T) {
|
||||||
for _, tt := range tests {
|
for _, tt := range tests {
|
||||||
t.Run(
|
t.Run(
|
||||||
tt.description, func(t *testing.T) {
|
tt.description, func(t *testing.T) {
|
||||||
var currentPage int64
|
var currentPage int32
|
||||||
client := &mockDBClient{
|
client := &mockDBClient{
|
||||||
executeRequest: func() postgresflex.ApiListDatabasesRequestRequest {
|
executeRequest: func() postgresflex.ApiListDatabasesRequestRequest {
|
||||||
return &mockRequest{
|
return &mockRequest{
|
||||||
|
|
|
||||||
|
|
@ -2,43 +2,42 @@ package postgresflexalpha
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"fmt"
|
"fmt"
|
||||||
"strconv"
|
|
||||||
|
|
||||||
"github.com/hashicorp/terraform-plugin-framework/types"
|
"github.com/hashicorp/terraform-plugin-framework/types"
|
||||||
|
"github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
|
||||||
|
|
||||||
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
|
|
||||||
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
|
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
|
||||||
)
|
)
|
||||||
|
|
||||||
// mapFields maps fields from a ListDatabase API response to a resourceModel for the data source.
|
// mapFields maps fields from a ListDatabase API response to a resourceModel for the data source.
|
||||||
func mapFields(
|
func mapFields(
|
||||||
source *postgresflexalpha.ListDatabase,
|
source *v3alpha1api.ListDatabase,
|
||||||
model *dataSourceModel,
|
model *dataSourceModel,
|
||||||
region string,
|
region string,
|
||||||
) error {
|
) error {
|
||||||
if source == nil {
|
if source == nil {
|
||||||
return fmt.Errorf("response is nil")
|
return fmt.Errorf("response is nil")
|
||||||
}
|
}
|
||||||
if source.Id == nil || *source.Id == 0 {
|
if source.Id == 0 {
|
||||||
return fmt.Errorf("id not present")
|
return fmt.Errorf("id not present")
|
||||||
}
|
}
|
||||||
if model == nil {
|
if model == nil {
|
||||||
return fmt.Errorf("model given is nil")
|
return fmt.Errorf("model given is nil")
|
||||||
}
|
}
|
||||||
|
|
||||||
var databaseId int64
|
var databaseId int32
|
||||||
if model.DatabaseId.ValueInt64() != 0 {
|
if model.DatabaseId.ValueInt32() != 0 {
|
||||||
databaseId = model.DatabaseId.ValueInt64()
|
databaseId = model.DatabaseId.ValueInt32()
|
||||||
} else if source.Id != nil {
|
} else if source.Id != 0 {
|
||||||
databaseId = *source.Id
|
databaseId = source.Id
|
||||||
} else {
|
} else {
|
||||||
return fmt.Errorf("database id not present")
|
return fmt.Errorf("database id not present")
|
||||||
}
|
}
|
||||||
|
|
||||||
model.Id = types.Int64Value(databaseId)
|
model.Id = types.Int32Value(databaseId)
|
||||||
model.DatabaseId = types.Int64Value(databaseId)
|
model.DatabaseId = types.Int32Value(databaseId)
|
||||||
model.Name = types.StringValue(source.GetName())
|
model.Name = types.StringValue(source.GetName())
|
||||||
model.Owner = types.StringPointerValue(cleanString(source.Owner))
|
model.Owner = types.StringValue(cleanString(source.Owner))
|
||||||
model.Region = types.StringValue(region)
|
model.Region = types.StringValue(region)
|
||||||
model.ProjectId = types.StringValue(model.ProjectId.ValueString())
|
model.ProjectId = types.StringValue(model.ProjectId.ValueString())
|
||||||
model.InstanceId = types.StringValue(model.InstanceId.ValueString())
|
model.InstanceId = types.StringValue(model.InstanceId.ValueString())
|
||||||
|
|
@ -46,48 +45,48 @@ func mapFields(
|
||||||
model.ProjectId.ValueString(),
|
model.ProjectId.ValueString(),
|
||||||
region,
|
region,
|
||||||
model.InstanceId.ValueString(),
|
model.InstanceId.ValueString(),
|
||||||
strconv.FormatInt(databaseId, 10),
|
string(databaseId),
|
||||||
)
|
)
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// mapResourceFields maps fields from a GetDatabase API response to a resourceModel for the resource.
|
// mapResourceFields maps fields from a GetDatabase API response to a resourceModel for the resource.
|
||||||
func mapResourceFields(source *postgresflexalpha.GetDatabaseResponse, model *resourceModel) error {
|
func mapResourceFields(source *v3alpha1api.GetDatabaseResponse, model *resourceModel) error {
|
||||||
if source == nil {
|
if source == nil {
|
||||||
return fmt.Errorf("response is nil")
|
return fmt.Errorf("response is nil")
|
||||||
}
|
}
|
||||||
if source.Id == nil || *source.Id == 0 {
|
if source.Id == 0 {
|
||||||
return fmt.Errorf("id not present")
|
return fmt.Errorf("id not present")
|
||||||
}
|
}
|
||||||
if model == nil {
|
if model == nil {
|
||||||
return fmt.Errorf("model input is nil")
|
return fmt.Errorf("model input is nil")
|
||||||
}
|
}
|
||||||
|
|
||||||
var databaseId int64
|
var databaseId int32
|
||||||
if model.Id.ValueInt64() != 0 {
|
if model.Id.ValueInt32() != 0 {
|
||||||
databaseId = model.Id.ValueInt64()
|
databaseId = model.Id.ValueInt32()
|
||||||
} else if source.Id != nil {
|
} else if source.Id != 0 {
|
||||||
databaseId = *source.Id
|
databaseId = source.Id
|
||||||
} else {
|
} else {
|
||||||
return fmt.Errorf("database id not present")
|
return fmt.Errorf("database id not present")
|
||||||
}
|
}
|
||||||
|
|
||||||
model.Id = types.Int64Value(databaseId)
|
model.Id = types.Int32Value(databaseId)
|
||||||
model.DatabaseId = types.Int64Value(databaseId)
|
model.DatabaseId = types.Int32Value(databaseId)
|
||||||
model.Name = types.StringValue(source.GetName())
|
model.Name = types.StringValue(source.GetName())
|
||||||
model.Owner = types.StringPointerValue(cleanString(source.Owner))
|
model.Owner = types.StringValue(cleanString(source.Owner))
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// toCreatePayload converts the resource model to an API create payload.
|
// toCreatePayload converts the resource model to an API create payload.
|
||||||
func toCreatePayload(model *resourceModel) (*postgresflexalpha.CreateDatabaseRequestPayload, error) {
|
func toCreatePayload(model *resourceModel) (*v3alpha1api.CreateDatabaseRequestPayload, error) {
|
||||||
if model == nil {
|
if model == nil {
|
||||||
return nil, fmt.Errorf("nil model")
|
return nil, fmt.Errorf("nil model")
|
||||||
}
|
}
|
||||||
|
|
||||||
return &postgresflexalpha.CreateDatabaseRequestPayload{
|
return &v3alpha1api.CreateDatabaseRequestPayload{
|
||||||
Name: model.Name.ValueStringPointer(),
|
Name: model.Name.ValueString(),
|
||||||
Owner: model.Owner.ValueStringPointer(),
|
Owner: model.Owner.ValueStringPointer(),
|
||||||
}, nil
|
}, nil
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -7,7 +7,7 @@ import (
|
||||||
"github.com/hashicorp/terraform-plugin-framework/types"
|
"github.com/hashicorp/terraform-plugin-framework/types"
|
||||||
"github.com/stackitcloud/stackit-sdk-go/core/utils"
|
"github.com/stackitcloud/stackit-sdk-go/core/utils"
|
||||||
|
|
||||||
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
|
postgresflexalpha "github.com/stackitcloud/stackit-sdk-go/services/postgresflex"
|
||||||
datasource "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/database/datasources_gen"
|
datasource "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/database/datasources_gen"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
@ -31,7 +31,7 @@ func TestMapFields(t *testing.T) {
|
||||||
name: "should map fields correctly",
|
name: "should map fields correctly",
|
||||||
given: given{
|
given: given{
|
||||||
source: &postgresflexalpha.ListDatabase{
|
source: &postgresflexalpha.ListDatabase{
|
||||||
Id: utils.Ptr(int64(1)),
|
Id: utils.Ptr(int32(1)),
|
||||||
Name: utils.Ptr("my-db"),
|
Name: utils.Ptr("my-db"),
|
||||||
Owner: utils.Ptr("\"my-owner\""),
|
Owner: utils.Ptr("\"my-owner\""),
|
||||||
},
|
},
|
||||||
|
|
@ -46,11 +46,11 @@ func TestMapFields(t *testing.T) {
|
||||||
expected: expected{
|
expected: expected{
|
||||||
model: &dataSourceModel{
|
model: &dataSourceModel{
|
||||||
DatabaseModel: datasource.DatabaseModel{
|
DatabaseModel: datasource.DatabaseModel{
|
||||||
Id: types.Int64Value(1),
|
Id: types.Int32Value(1),
|
||||||
Name: types.StringValue("my-db"),
|
Name: types.StringValue("my-db"),
|
||||||
Owner: types.StringValue("my-owner"),
|
Owner: types.StringValue("my-owner"),
|
||||||
Region: types.StringValue("eu01"),
|
Region: types.StringValue("eu01"),
|
||||||
DatabaseId: types.Int64Value(1),
|
DatabaseId: types.Int32Value(1),
|
||||||
InstanceId: types.StringValue("my-instance"),
|
InstanceId: types.StringValue("my-instance"),
|
||||||
ProjectId: types.StringValue("my-project"),
|
ProjectId: types.StringValue("my-project"),
|
||||||
},
|
},
|
||||||
|
|
@ -62,12 +62,12 @@ func TestMapFields(t *testing.T) {
|
||||||
name: "should preserve existing model ID",
|
name: "should preserve existing model ID",
|
||||||
given: given{
|
given: given{
|
||||||
source: &postgresflexalpha.ListDatabase{
|
source: &postgresflexalpha.ListDatabase{
|
||||||
Id: utils.Ptr(int64(1)),
|
Id: utils.Ptr(int32(1)),
|
||||||
Name: utils.Ptr("my-db"),
|
Name: utils.Ptr("my-db"),
|
||||||
},
|
},
|
||||||
model: &dataSourceModel{
|
model: &dataSourceModel{
|
||||||
DatabaseModel: datasource.DatabaseModel{
|
DatabaseModel: datasource.DatabaseModel{
|
||||||
Id: types.Int64Value(1),
|
Id: types.Int32Value(1),
|
||||||
ProjectId: types.StringValue("my-project"),
|
ProjectId: types.StringValue("my-project"),
|
||||||
InstanceId: types.StringValue("my-instance"),
|
InstanceId: types.StringValue("my-instance"),
|
||||||
},
|
},
|
||||||
|
|
@ -77,9 +77,9 @@ func TestMapFields(t *testing.T) {
|
||||||
expected: expected{
|
expected: expected{
|
||||||
model: &dataSourceModel{
|
model: &dataSourceModel{
|
||||||
DatabaseModel: datasource.DatabaseModel{
|
DatabaseModel: datasource.DatabaseModel{
|
||||||
Id: types.Int64Value(1),
|
Id: types.Int32Value(1),
|
||||||
Name: types.StringValue("my-db"),
|
Name: types.StringValue("my-db"),
|
||||||
Owner: types.StringNull(), DatabaseId: types.Int64Value(1),
|
Owner: types.StringNull(), DatabaseId: types.Int32Value(1),
|
||||||
Region: types.StringValue("eu01"),
|
Region: types.StringValue("eu01"),
|
||||||
InstanceId: types.StringValue("my-instance"),
|
InstanceId: types.StringValue("my-instance"),
|
||||||
ProjectId: types.StringValue("my-project"),
|
ProjectId: types.StringValue("my-project"),
|
||||||
|
|
@ -107,7 +107,7 @@ func TestMapFields(t *testing.T) {
|
||||||
{
|
{
|
||||||
name: "should fail on nil model",
|
name: "should fail on nil model",
|
||||||
given: given{
|
given: given{
|
||||||
source: &postgresflexalpha.ListDatabase{Id: utils.Ptr(int64(1))},
|
source: &postgresflexalpha.ListDatabase{Id: utils.Ptr(Int32(1))},
|
||||||
model: nil,
|
model: nil,
|
||||||
},
|
},
|
||||||
expected: expected{err: true},
|
expected: expected{err: true},
|
||||||
|
|
@ -150,7 +150,7 @@ func TestMapResourceFields(t *testing.T) {
|
||||||
name: "should map fields correctly",
|
name: "should map fields correctly",
|
||||||
given: given{
|
given: given{
|
||||||
source: &postgresflexalpha.GetDatabaseResponse{
|
source: &postgresflexalpha.GetDatabaseResponse{
|
||||||
Id: utils.Ptr(int64(1)),
|
Id: utils.Ptr(Int32(1)),
|
||||||
Name: utils.Ptr("my-db"),
|
Name: utils.Ptr("my-db"),
|
||||||
Owner: utils.Ptr("my-owner"),
|
Owner: utils.Ptr("my-owner"),
|
||||||
},
|
},
|
||||||
|
|
@ -158,10 +158,10 @@ func TestMapResourceFields(t *testing.T) {
|
||||||
},
|
},
|
||||||
expected: expected{
|
expected: expected{
|
||||||
model: &resourceModel{
|
model: &resourceModel{
|
||||||
Id: types.Int64Value(1),
|
Id: types.Int32Value(1),
|
||||||
Name: types.StringValue("my-db"),
|
Name: types.StringValue("my-db"),
|
||||||
Owner: types.StringValue("my-owner"),
|
Owner: types.StringValue("my-owner"),
|
||||||
DatabaseId: types.Int64Value(1),
|
DatabaseId: types.Int32Value(1),
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
|
|
||||||
|
|
@ -14,14 +14,14 @@ import (
|
||||||
"github.com/hashicorp/terraform-plugin-framework/resource/identityschema"
|
"github.com/hashicorp/terraform-plugin-framework/resource/identityschema"
|
||||||
"github.com/hashicorp/terraform-plugin-framework/types"
|
"github.com/hashicorp/terraform-plugin-framework/types"
|
||||||
"github.com/hashicorp/terraform-plugin-log/tflog"
|
"github.com/hashicorp/terraform-plugin-log/tflog"
|
||||||
|
"github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
|
||||||
|
|
||||||
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
|
|
||||||
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
|
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
|
||||||
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
|
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
|
||||||
postgresflexalpha2 "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/database/resources_gen"
|
postgresflexalphaResGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/database/resources_gen"
|
||||||
postgresflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/utils"
|
postgresflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/utils"
|
||||||
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
|
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
|
||||||
postgresflexalpha3 "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/wait/postgresflexalpha"
|
postgresflexalphaWait "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/wait/postgresflexalpha"
|
||||||
)
|
)
|
||||||
|
|
||||||
var (
|
var (
|
||||||
|
|
@ -43,19 +43,19 @@ func NewDatabaseResource() resource.Resource {
|
||||||
}
|
}
|
||||||
|
|
||||||
// resourceModel describes the resource data model.
|
// resourceModel describes the resource data model.
|
||||||
type resourceModel = postgresflexalpha2.DatabaseModel
|
type resourceModel = postgresflexalphaResGen.DatabaseModel
|
||||||
|
|
||||||
// DatabaseResourceIdentityModel describes the resource's identity attributes.
|
// DatabaseResourceIdentityModel describes the resource's identity attributes.
|
||||||
type DatabaseResourceIdentityModel struct {
|
type DatabaseResourceIdentityModel struct {
|
||||||
ProjectID types.String `tfsdk:"project_id"`
|
ProjectID types.String `tfsdk:"project_id"`
|
||||||
Region types.String `tfsdk:"region"`
|
Region types.String `tfsdk:"region"`
|
||||||
InstanceID types.String `tfsdk:"instance_id"`
|
InstanceID types.String `tfsdk:"instance_id"`
|
||||||
DatabaseID types.Int64 `tfsdk:"database_id"`
|
DatabaseID types.Int32 `tfsdk:"database_id"`
|
||||||
}
|
}
|
||||||
|
|
||||||
// databaseResource is the resource implementation.
|
// databaseResource is the resource implementation.
|
||||||
type databaseResource struct {
|
type databaseResource struct {
|
||||||
client *postgresflexalpha.APIClient
|
client *v3alpha1api.APIClient
|
||||||
providerData core.ProviderData
|
providerData core.ProviderData
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -122,7 +122,7 @@ var modifiersFileByte []byte
|
||||||
|
|
||||||
// Schema defines the schema for the resource.
|
// Schema defines the schema for the resource.
|
||||||
func (r *databaseResource) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) {
|
func (r *databaseResource) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) {
|
||||||
s := postgresflexalpha2.DatabaseResourceSchema(ctx)
|
s := postgresflexalphaResGen.DatabaseResourceSchema(ctx)
|
||||||
|
|
||||||
fields, err := utils.ReadModifiersConfig(modifiersFileByte)
|
fields, err := utils.ReadModifiersConfig(modifiersFileByte)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
|
@ -155,7 +155,7 @@ func (r *databaseResource) IdentitySchema(
|
||||||
"instance_id": identityschema.StringAttribute{
|
"instance_id": identityschema.StringAttribute{
|
||||||
RequiredForImport: true,
|
RequiredForImport: true,
|
||||||
},
|
},
|
||||||
"database_id": identityschema.Int64Attribute{
|
"database_id": identityschema.Int32Attribute{
|
||||||
RequiredForImport: true,
|
RequiredForImport: true,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
|
@ -198,7 +198,7 @@ func (r *databaseResource) Create(
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
// Create new database
|
// Create new database
|
||||||
databaseResp, err := r.client.CreateDatabaseRequest(
|
databaseResp, err := r.client.DefaultAPI.CreateDatabaseRequest(
|
||||||
ctx,
|
ctx,
|
||||||
projectId,
|
projectId,
|
||||||
region,
|
region,
|
||||||
|
|
@ -209,16 +209,17 @@ func (r *databaseResource) Create(
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
if databaseResp == nil || databaseResp.Id == nil {
|
dbID, ok := databaseResp.GetIdOk()
|
||||||
|
if !ok {
|
||||||
core.LogAndAddError(
|
core.LogAndAddError(
|
||||||
ctx,
|
ctx,
|
||||||
&resp.Diagnostics,
|
&resp.Diagnostics,
|
||||||
funcErrorSummary,
|
funcErrorSummary,
|
||||||
"API didn't return database Id. A database might have been created",
|
"API didn't return database Id. A database might although have been created",
|
||||||
)
|
)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
databaseId := *databaseResp.Id
|
databaseId := *dbID
|
||||||
ctx = tflog.SetField(ctx, "database_id", databaseId)
|
ctx = tflog.SetField(ctx, "database_id", databaseId)
|
||||||
ctx = core.LogResponse(ctx)
|
ctx = core.LogResponse(ctx)
|
||||||
|
|
||||||
|
|
@ -227,14 +228,14 @@ func (r *databaseResource) Create(
|
||||||
ProjectID: types.StringValue(projectId),
|
ProjectID: types.StringValue(projectId),
|
||||||
Region: types.StringValue(region),
|
Region: types.StringValue(region),
|
||||||
InstanceID: types.StringValue(instanceId),
|
InstanceID: types.StringValue(instanceId),
|
||||||
DatabaseID: types.Int64Value(databaseId),
|
DatabaseID: types.Int32Value(int32(databaseId)),
|
||||||
}
|
}
|
||||||
resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
|
resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
|
||||||
if resp.Diagnostics.HasError() {
|
if resp.Diagnostics.HasError() {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
database, err := postgresflexalpha3.GetDatabaseByIdWaitHandler(ctx, r.client, projectId, instanceId, region, databaseId).
|
database, err := postgresflexalphaWait.GetDatabaseByIdWaitHandler(ctx, r.client.DefaultAPI, projectId, instanceId, region, databaseId).
|
||||||
SetTimeout(15 * time.Minute).
|
SetTimeout(15 * time.Minute).
|
||||||
SetSleepBeforeWait(15 * time.Second).
|
SetSleepBeforeWait(15 * time.Second).
|
||||||
WaitWithContext(ctx)
|
WaitWithContext(ctx)
|
||||||
|
|
@ -286,14 +287,14 @@ func (r *databaseResource) Read(
|
||||||
projectId := model.ProjectId.ValueString()
|
projectId := model.ProjectId.ValueString()
|
||||||
instanceId := model.InstanceId.ValueString()
|
instanceId := model.InstanceId.ValueString()
|
||||||
region := model.Region.ValueString()
|
region := model.Region.ValueString()
|
||||||
databaseId := model.DatabaseId.ValueInt64()
|
databaseId := model.DatabaseId.ValueInt32()
|
||||||
|
|
||||||
ctx = tflog.SetField(ctx, "project_id", projectId)
|
ctx = tflog.SetField(ctx, "project_id", projectId)
|
||||||
ctx = tflog.SetField(ctx, "instance_id", instanceId)
|
ctx = tflog.SetField(ctx, "instance_id", instanceId)
|
||||||
ctx = tflog.SetField(ctx, "region", region)
|
ctx = tflog.SetField(ctx, "region", region)
|
||||||
ctx = tflog.SetField(ctx, "database_id", databaseId)
|
ctx = tflog.SetField(ctx, "database_id", databaseId)
|
||||||
|
|
||||||
databaseResp, err := postgresflexalpha3.GetDatabaseByIdWaitHandler(ctx, r.client, projectId, instanceId, region, databaseId).
|
databaseResp, err := postgresflexalphaWait.GetDatabaseByIdWaitHandler(ctx, r.client.DefaultAPI, projectId, instanceId, region, databaseId).
|
||||||
SetTimeout(15 * time.Minute).
|
SetTimeout(15 * time.Minute).
|
||||||
SetSleepBeforeWait(15 * time.Second).
|
SetSleepBeforeWait(15 * time.Second).
|
||||||
WaitWithContext(ctx)
|
WaitWithContext(ctx)
|
||||||
|
|
@ -327,7 +328,7 @@ func (r *databaseResource) Read(
|
||||||
ProjectID: types.StringValue(projectId),
|
ProjectID: types.StringValue(projectId),
|
||||||
Region: types.StringValue(region),
|
Region: types.StringValue(region),
|
||||||
InstanceID: types.StringValue(instanceId),
|
InstanceID: types.StringValue(instanceId),
|
||||||
DatabaseID: types.Int64Value(databaseId),
|
DatabaseID: types.Int32Value(databaseId),
|
||||||
}
|
}
|
||||||
resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
|
resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
|
||||||
if resp.Diagnostics.HasError() {
|
if resp.Diagnostics.HasError() {
|
||||||
|
|
@ -361,13 +362,7 @@ func (r *databaseResource) Update(
|
||||||
projectId := model.ProjectId.ValueString()
|
projectId := model.ProjectId.ValueString()
|
||||||
instanceId := model.InstanceId.ValueString()
|
instanceId := model.InstanceId.ValueString()
|
||||||
region := model.Region.ValueString()
|
region := model.Region.ValueString()
|
||||||
databaseId64 := model.DatabaseId.ValueInt64()
|
databaseId := model.DatabaseId.ValueInt32()
|
||||||
|
|
||||||
if databaseId64 > math.MaxInt32 {
|
|
||||||
core.LogAndAddError(ctx, &resp.Diagnostics, "Error in type conversion", "int value too large (databaseId)")
|
|
||||||
return
|
|
||||||
}
|
|
||||||
databaseId := int32(databaseId64) // nolint:gosec // check is performed above
|
|
||||||
|
|
||||||
ctx = tflog.SetField(ctx, "project_id", projectId)
|
ctx = tflog.SetField(ctx, "project_id", projectId)
|
||||||
ctx = tflog.SetField(ctx, "instance_id", instanceId)
|
ctx = tflog.SetField(ctx, "instance_id", instanceId)
|
||||||
|
|
@ -383,7 +378,7 @@ func (r *databaseResource) Update(
|
||||||
}
|
}
|
||||||
|
|
||||||
modified := false
|
modified := false
|
||||||
var payload postgresflexalpha.UpdateDatabasePartiallyRequestPayload
|
var payload v3alpha1api.UpdateDatabasePartiallyRequestPayload
|
||||||
if stateModel.Name != model.Name {
|
if stateModel.Name != model.Name {
|
||||||
payload.Name = model.Name.ValueStringPointer()
|
payload.Name = model.Name.ValueStringPointer()
|
||||||
modified = true
|
modified = true
|
||||||
|
|
@ -400,7 +395,7 @@ func (r *databaseResource) Update(
|
||||||
}
|
}
|
||||||
|
|
||||||
// Update existing database
|
// Update existing database
|
||||||
err := r.client.UpdateDatabasePartiallyRequest(
|
err := r.client.DefaultAPI.UpdateDatabasePartiallyRequest(
|
||||||
ctx,
|
ctx,
|
||||||
projectId,
|
projectId,
|
||||||
region,
|
region,
|
||||||
|
|
@ -414,7 +409,7 @@ func (r *databaseResource) Update(
|
||||||
|
|
||||||
ctx = core.LogResponse(ctx)
|
ctx = core.LogResponse(ctx)
|
||||||
|
|
||||||
databaseResp, err := postgresflexalpha3.GetDatabaseByIdWaitHandler(ctx, r.client, projectId, instanceId, region, databaseId64).
|
databaseResp, err := postgresflexalphaWait.GetDatabaseByIdWaitHandler(ctx, r.client.DefaultAPI, projectId, instanceId, region, databaseId).
|
||||||
SetTimeout(15 * time.Minute).
|
SetTimeout(15 * time.Minute).
|
||||||
SetSleepBeforeWait(15 * time.Second).
|
SetSleepBeforeWait(15 * time.Second).
|
||||||
WaitWithContext(ctx)
|
WaitWithContext(ctx)
|
||||||
|
|
@ -442,7 +437,7 @@ func (r *databaseResource) Update(
|
||||||
ProjectID: types.StringValue(projectId),
|
ProjectID: types.StringValue(projectId),
|
||||||
Region: types.StringValue(region),
|
Region: types.StringValue(region),
|
||||||
InstanceID: types.StringValue(instanceId),
|
InstanceID: types.StringValue(instanceId),
|
||||||
DatabaseID: types.Int64Value(databaseId64),
|
DatabaseID: types.Int32Value(databaseId),
|
||||||
}
|
}
|
||||||
resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
|
resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
|
||||||
if resp.Diagnostics.HasError() {
|
if resp.Diagnostics.HasError() {
|
||||||
|
|
@ -500,7 +495,7 @@ func (r *databaseResource) Delete(
|
||||||
ctx = tflog.SetField(ctx, "database_id", databaseId)
|
ctx = tflog.SetField(ctx, "database_id", databaseId)
|
||||||
|
|
||||||
// Delete existing record set
|
// Delete existing record set
|
||||||
err := r.client.DeleteDatabaseRequestExecute(ctx, projectId, region, instanceId, databaseId)
|
err := r.client.DefaultAPI.DeleteDatabaseRequest(ctx, projectId, region, instanceId, databaseId).Execute()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
core.LogAndAddError(ctx, &resp.Diagnostics, "Error deleting database", fmt.Sprintf("Calling API: %v", err))
|
core.LogAndAddError(ctx, &resp.Diagnostics, "Error deleting database", fmt.Sprintf("Calling API: %v", err))
|
||||||
}
|
}
|
||||||
|
|
@ -572,7 +567,7 @@ func (r *databaseResource) ImportState(
|
||||||
projectId := identityData.ProjectID.ValueString()
|
projectId := identityData.ProjectID.ValueString()
|
||||||
region := identityData.Region.ValueString()
|
region := identityData.Region.ValueString()
|
||||||
instanceId := identityData.InstanceID.ValueString()
|
instanceId := identityData.InstanceID.ValueString()
|
||||||
databaseId := identityData.DatabaseID.ValueInt64()
|
databaseId := identityData.DatabaseID.ValueInt32()
|
||||||
|
|
||||||
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), projectId)...)
|
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), projectId)...)
|
||||||
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), region)...)
|
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), region)...)
|
||||||
|
|
@ -586,14 +581,14 @@ func (r *databaseResource) ImportState(
|
||||||
func (r *databaseResource) extractIdentityData(
|
func (r *databaseResource) extractIdentityData(
|
||||||
model resourceModel,
|
model resourceModel,
|
||||||
identity DatabaseResourceIdentityModel,
|
identity DatabaseResourceIdentityModel,
|
||||||
) (projectId, region, instanceId string, databaseId int64, err error) {
|
) (projectId, region, instanceId string, databaseId int32, err error) {
|
||||||
if !model.DatabaseId.IsNull() && !model.DatabaseId.IsUnknown() {
|
if !model.DatabaseId.IsNull() && !model.DatabaseId.IsUnknown() {
|
||||||
databaseId = model.DatabaseId.ValueInt64()
|
databaseId = model.DatabaseId.ValueInt32()
|
||||||
} else {
|
} else {
|
||||||
if identity.DatabaseID.IsNull() || identity.DatabaseID.IsUnknown() {
|
if identity.DatabaseID.IsNull() || identity.DatabaseID.IsUnknown() {
|
||||||
return "", "", "", 0, fmt.Errorf("database_id not found in config")
|
return "", "", "", 0, fmt.Errorf("database_id not found in config")
|
||||||
}
|
}
|
||||||
databaseId = identity.DatabaseID.ValueInt64()
|
databaseId = identity.DatabaseID.ValueInt32()
|
||||||
}
|
}
|
||||||
|
|
||||||
if !model.ProjectId.IsNull() && !model.ProjectId.IsUnknown() {
|
if !model.ProjectId.IsNull() && !model.ProjectId.IsUnknown() {
|
||||||
|
|
|
||||||
|
|
@ -14,13 +14,13 @@ import (
|
||||||
func DatabaseResourceSchema(ctx context.Context) schema.Schema {
|
func DatabaseResourceSchema(ctx context.Context) schema.Schema {
|
||||||
return schema.Schema{
|
return schema.Schema{
|
||||||
Attributes: map[string]schema.Attribute{
|
Attributes: map[string]schema.Attribute{
|
||||||
"database_id": schema.Int64Attribute{
|
"database_id": schema.Int32Attribute{
|
||||||
Optional: true,
|
Optional: true,
|
||||||
Computed: true,
|
Computed: true,
|
||||||
Description: "The ID of the database.",
|
Description: "The ID of the database.",
|
||||||
MarkdownDescription: "The ID of the database.",
|
MarkdownDescription: "The ID of the database.",
|
||||||
},
|
},
|
||||||
"id": schema.Int64Attribute{
|
"id": schema.Int32Attribute{
|
||||||
Computed: true,
|
Computed: true,
|
||||||
Description: "The id of the database.",
|
Description: "The id of the database.",
|
||||||
MarkdownDescription: "The id of the database.",
|
MarkdownDescription: "The id of the database.",
|
||||||
|
|
@ -64,8 +64,8 @@ func DatabaseResourceSchema(ctx context.Context) schema.Schema {
|
||||||
}
|
}
|
||||||
|
|
||||||
type DatabaseModel struct {
|
type DatabaseModel struct {
|
||||||
DatabaseId types.Int64 `tfsdk:"database_id"`
|
DatabaseId types.Int32 `tfsdk:"database_id"`
|
||||||
Id types.Int64 `tfsdk:"id"`
|
Id types.Int32 `tfsdk:"id"`
|
||||||
InstanceId types.String `tfsdk:"instance_id"`
|
InstanceId types.String `tfsdk:"instance_id"`
|
||||||
Name types.String `tfsdk:"name"`
|
Name types.String `tfsdk:"name"`
|
||||||
Owner types.String `tfsdk:"owner"`
|
Owner types.String `tfsdk:"owner"`
|
||||||
|
|
|
||||||
|
|
@ -8,8 +8,8 @@ import (
|
||||||
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
|
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
|
||||||
"github.com/hashicorp/terraform-plugin-framework/types"
|
"github.com/hashicorp/terraform-plugin-framework/types"
|
||||||
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
|
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
|
||||||
|
"github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
|
||||||
|
|
||||||
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
|
|
||||||
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
|
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
|
||||||
postgresflexalphaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/flavors/datasources_gen"
|
postgresflexalphaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/flavors/datasources_gen"
|
||||||
postgresflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/utils"
|
postgresflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/utils"
|
||||||
|
|
@ -30,13 +30,13 @@ type FlavorModel struct {
|
||||||
ProjectId types.String `tfsdk:"project_id"`
|
ProjectId types.String `tfsdk:"project_id"`
|
||||||
Region types.String `tfsdk:"region"`
|
Region types.String `tfsdk:"region"`
|
||||||
StorageClass types.String `tfsdk:"storage_class"`
|
StorageClass types.String `tfsdk:"storage_class"`
|
||||||
Cpu types.Int64 `tfsdk:"cpu"`
|
Cpu types.Int32 `tfsdk:"cpu"`
|
||||||
Description types.String `tfsdk:"description"`
|
Description types.String `tfsdk:"description"`
|
||||||
Id types.String `tfsdk:"id"`
|
Id types.String `tfsdk:"id"`
|
||||||
FlavorId types.String `tfsdk:"flavor_id"`
|
FlavorId types.String `tfsdk:"flavor_id"`
|
||||||
MaxGb types.Int64 `tfsdk:"max_gb"`
|
MaxGb types.Int32 `tfsdk:"max_gb"`
|
||||||
Memory types.Int64 `tfsdk:"ram"`
|
Memory types.Int32 `tfsdk:"ram"`
|
||||||
MinGb types.Int64 `tfsdk:"min_gb"`
|
MinGb types.Int32 `tfsdk:"min_gb"`
|
||||||
NodeType types.String `tfsdk:"node_type"`
|
NodeType types.String `tfsdk:"node_type"`
|
||||||
StorageClasses types.List `tfsdk:"storage_classes"`
|
StorageClasses types.List `tfsdk:"storage_classes"`
|
||||||
}
|
}
|
||||||
|
|
@ -48,7 +48,7 @@ func NewFlavorDataSource() datasource.DataSource {
|
||||||
|
|
||||||
// flavorDataSource is the data source implementation.
|
// flavorDataSource is the data source implementation.
|
||||||
type flavorDataSource struct {
|
type flavorDataSource struct {
|
||||||
client *postgresflexalpha.APIClient
|
client *v3alpha1api.APIClient
|
||||||
providerData core.ProviderData
|
providerData core.ProviderData
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -86,12 +86,12 @@ func (r *flavorDataSource) Schema(ctx context.Context, _ datasource.SchemaReques
|
||||||
Description: "The flavor description.",
|
Description: "The flavor description.",
|
||||||
MarkdownDescription: "The flavor description.",
|
MarkdownDescription: "The flavor description.",
|
||||||
},
|
},
|
||||||
"cpu": schema.Int64Attribute{
|
"cpu": schema.Int32Attribute{
|
||||||
Required: true,
|
Required: true,
|
||||||
Description: "The cpu count of the instance.",
|
Description: "The cpu count of the instance.",
|
||||||
MarkdownDescription: "The cpu count of the instance.",
|
MarkdownDescription: "The cpu count of the instance.",
|
||||||
},
|
},
|
||||||
"ram": schema.Int64Attribute{
|
"ram": schema.Int32Attribute{
|
||||||
Required: true,
|
Required: true,
|
||||||
Description: "The memory of the instance in Gibibyte.",
|
Description: "The memory of the instance in Gibibyte.",
|
||||||
MarkdownDescription: "The memory of the instance in Gibibyte.",
|
MarkdownDescription: "The memory of the instance in Gibibyte.",
|
||||||
|
|
@ -116,12 +116,12 @@ func (r *flavorDataSource) Schema(ctx context.Context, _ datasource.SchemaReques
|
||||||
Description: "The flavor id of the instance flavor.",
|
Description: "The flavor id of the instance flavor.",
|
||||||
MarkdownDescription: "The flavor id of the instance flavor.",
|
MarkdownDescription: "The flavor id of the instance flavor.",
|
||||||
},
|
},
|
||||||
"max_gb": schema.Int64Attribute{
|
"max_gb": schema.Int32Attribute{
|
||||||
Computed: true,
|
Computed: true,
|
||||||
Description: "maximum storage which can be ordered for the flavor in Gigabyte.",
|
Description: "maximum storage which can be ordered for the flavor in Gigabyte.",
|
||||||
MarkdownDescription: "maximum storage which can be ordered for the flavor in Gigabyte.",
|
MarkdownDescription: "maximum storage which can be ordered for the flavor in Gigabyte.",
|
||||||
},
|
},
|
||||||
"min_gb": schema.Int64Attribute{
|
"min_gb": schema.Int32Attribute{
|
||||||
Computed: true,
|
Computed: true,
|
||||||
Description: "minimum storage which is required to order in Gigabyte.",
|
Description: "minimum storage which is required to order in Gigabyte.",
|
||||||
MarkdownDescription: "minimum storage which is required to order in Gigabyte.",
|
MarkdownDescription: "minimum storage which is required to order in Gigabyte.",
|
||||||
|
|
@ -138,10 +138,10 @@ func (r *flavorDataSource) Schema(ctx context.Context, _ datasource.SchemaReques
|
||||||
"class": schema.StringAttribute{
|
"class": schema.StringAttribute{
|
||||||
Computed: true,
|
Computed: true,
|
||||||
},
|
},
|
||||||
"max_io_per_sec": schema.Int64Attribute{
|
"max_io_per_sec": schema.Int32Attribute{
|
||||||
Computed: true,
|
Computed: true,
|
||||||
},
|
},
|
||||||
"max_through_in_mb": schema.Int64Attribute{
|
"max_through_in_mb": schema.Int32Attribute{
|
||||||
Computed: true,
|
Computed: true,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
|
@ -171,25 +171,25 @@ func (r *flavorDataSource) Read(ctx context.Context, req datasource.ReadRequest,
|
||||||
ctx = tflog.SetField(ctx, "project_id", projectId)
|
ctx = tflog.SetField(ctx, "project_id", projectId)
|
||||||
ctx = tflog.SetField(ctx, "region", region)
|
ctx = tflog.SetField(ctx, "region", region)
|
||||||
|
|
||||||
flavors, err := getAllFlavors(ctx, r.client, projectId, region)
|
flavors, err := getAllFlavors(ctx, r.client.DefaultAPI, projectId, region)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
core.LogAndAddError(ctx, &resp.Diagnostics, "Error reading flavors", fmt.Sprintf("getAllFlavors: %v", err))
|
core.LogAndAddError(ctx, &resp.Diagnostics, "Error reading flavors", fmt.Sprintf("getAllFlavors: %v", err))
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
var foundFlavors []postgresflexalpha.ListFlavors
|
var foundFlavors []v3alpha1api.ListFlavors
|
||||||
for _, flavor := range flavors {
|
for _, flavor := range flavors {
|
||||||
if model.Cpu.ValueInt64() != *flavor.Cpu {
|
if model.Cpu.ValueInt32() != flavor.Cpu {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
if model.Memory.ValueInt64() != *flavor.Memory {
|
if model.Memory.ValueInt32() != flavor.Memory {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
if model.NodeType.ValueString() != *flavor.NodeType {
|
if model.NodeType.ValueString() != flavor.NodeType {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
for _, sc := range *flavor.StorageClasses {
|
for _, sc := range flavor.StorageClasses {
|
||||||
if model.StorageClass.ValueString() != *sc.Class {
|
if model.StorageClass.ValueString() != sc.Class {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
foundFlavors = append(foundFlavors, flavor)
|
foundFlavors = append(foundFlavors, flavor)
|
||||||
|
|
@ -205,11 +205,11 @@ func (r *flavorDataSource) Read(ctx context.Context, req datasource.ReadRequest,
|
||||||
}
|
}
|
||||||
|
|
||||||
f := foundFlavors[0]
|
f := foundFlavors[0]
|
||||||
model.Description = types.StringValue(*f.Description)
|
model.Description = types.StringValue(f.Description)
|
||||||
model.Id = utils.BuildInternalTerraformId(model.ProjectId.ValueString(), region, *f.Id)
|
model.Id = utils.BuildInternalTerraformId(model.ProjectId.ValueString(), region, f.Id)
|
||||||
model.FlavorId = types.StringValue(*f.Id)
|
model.FlavorId = types.StringValue(f.Id)
|
||||||
model.MaxGb = types.Int64Value(*f.MaxGB)
|
model.MaxGb = types.Int32Value(f.MaxGB)
|
||||||
model.MinGb = types.Int64Value(*f.MinGB)
|
model.MinGb = types.Int32Value(f.MinGB)
|
||||||
|
|
||||||
if f.StorageClasses == nil {
|
if f.StorageClasses == nil {
|
||||||
model.StorageClasses = types.ListNull(postgresflexalphaGen.StorageClassesType{
|
model.StorageClasses = types.ListNull(postgresflexalphaGen.StorageClassesType{
|
||||||
|
|
@ -219,15 +219,15 @@ func (r *flavorDataSource) Read(ctx context.Context, req datasource.ReadRequest,
|
||||||
})
|
})
|
||||||
} else {
|
} else {
|
||||||
var scList []attr.Value
|
var scList []attr.Value
|
||||||
for _, sc := range *f.StorageClasses {
|
for _, sc := range f.StorageClasses {
|
||||||
scList = append(
|
scList = append(
|
||||||
scList,
|
scList,
|
||||||
postgresflexalphaGen.NewStorageClassesValueMust(
|
postgresflexalphaGen.NewStorageClassesValueMust(
|
||||||
postgresflexalphaGen.StorageClassesValue{}.AttributeTypes(ctx),
|
postgresflexalphaGen.StorageClassesValue{}.AttributeTypes(ctx),
|
||||||
map[string]attr.Value{
|
map[string]attr.Value{
|
||||||
"class": types.StringValue(*sc.Class),
|
"class": types.StringValue(sc.Class),
|
||||||
"max_io_per_sec": types.Int64Value(*sc.MaxIoPerSec),
|
"max_io_per_sec": types.Int32Value(sc.MaxIoPerSec),
|
||||||
"max_through_in_mb": types.Int64Value(*sc.MaxThroughInMb),
|
"max_through_in_mb": types.Int32Value(sc.MaxThroughInMb),
|
||||||
},
|
},
|
||||||
),
|
),
|
||||||
)
|
)
|
||||||
|
|
|
||||||
|
|
@ -23,7 +23,7 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
|
||||||
"flavors": schema.ListNestedAttribute{
|
"flavors": schema.ListNestedAttribute{
|
||||||
NestedObject: schema.NestedAttributeObject{
|
NestedObject: schema.NestedAttributeObject{
|
||||||
Attributes: map[string]schema.Attribute{
|
Attributes: map[string]schema.Attribute{
|
||||||
"cpu": schema.Int64Attribute{
|
"cpu": schema.Int32Attribute{
|
||||||
Computed: true,
|
Computed: true,
|
||||||
Description: "The cpu count of the instance.",
|
Description: "The cpu count of the instance.",
|
||||||
MarkdownDescription: "The cpu count of the instance.",
|
MarkdownDescription: "The cpu count of the instance.",
|
||||||
|
|
@ -38,17 +38,17 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
|
||||||
Description: "The id of the instance flavor.",
|
Description: "The id of the instance flavor.",
|
||||||
MarkdownDescription: "The id of the instance flavor.",
|
MarkdownDescription: "The id of the instance flavor.",
|
||||||
},
|
},
|
||||||
"max_gb": schema.Int64Attribute{
|
"max_gb": schema.Int32Attribute{
|
||||||
Computed: true,
|
Computed: true,
|
||||||
Description: "maximum storage which can be ordered for the flavor in Gigabyte.",
|
Description: "maximum storage which can be ordered for the flavor in Gigabyte.",
|
||||||
MarkdownDescription: "maximum storage which can be ordered for the flavor in Gigabyte.",
|
MarkdownDescription: "maximum storage which can be ordered for the flavor in Gigabyte.",
|
||||||
},
|
},
|
||||||
"memory": schema.Int64Attribute{
|
"memory": schema.Int32Attribute{
|
||||||
Computed: true,
|
Computed: true,
|
||||||
Description: "The memory of the instance in Gibibyte.",
|
Description: "The memory of the instance in Gibibyte.",
|
||||||
MarkdownDescription: "The memory of the instance in Gibibyte.",
|
MarkdownDescription: "The memory of the instance in Gibibyte.",
|
||||||
},
|
},
|
||||||
"min_gb": schema.Int64Attribute{
|
"min_gb": schema.Int32Attribute{
|
||||||
Computed: true,
|
Computed: true,
|
||||||
Description: "minimum storage which is required to order in Gigabyte.",
|
Description: "minimum storage which is required to order in Gigabyte.",
|
||||||
MarkdownDescription: "minimum storage which is required to order in Gigabyte.",
|
MarkdownDescription: "minimum storage which is required to order in Gigabyte.",
|
||||||
|
|
@ -64,10 +64,10 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
|
||||||
"class": schema.StringAttribute{
|
"class": schema.StringAttribute{
|
||||||
Computed: true,
|
Computed: true,
|
||||||
},
|
},
|
||||||
"max_io_per_sec": schema.Int64Attribute{
|
"max_io_per_sec": schema.Int32Attribute{
|
||||||
Computed: true,
|
Computed: true,
|
||||||
},
|
},
|
||||||
"max_through_in_mb": schema.Int64Attribute{
|
"max_through_in_mb": schema.Int32Attribute{
|
||||||
Computed: true,
|
Computed: true,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
|
@ -92,7 +92,7 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
|
||||||
Description: "List of flavors available for the project.",
|
Description: "List of flavors available for the project.",
|
||||||
MarkdownDescription: "List of flavors available for the project.",
|
MarkdownDescription: "List of flavors available for the project.",
|
||||||
},
|
},
|
||||||
"page": schema.Int64Attribute{
|
"page": schema.Int32Attribute{
|
||||||
Optional: true,
|
Optional: true,
|
||||||
Computed: true,
|
Computed: true,
|
||||||
Description: "Number of the page of items list to be returned.",
|
Description: "Number of the page of items list to be returned.",
|
||||||
|
|
@ -100,19 +100,19 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
|
||||||
},
|
},
|
||||||
"pagination": schema.SingleNestedAttribute{
|
"pagination": schema.SingleNestedAttribute{
|
||||||
Attributes: map[string]schema.Attribute{
|
Attributes: map[string]schema.Attribute{
|
||||||
"page": schema.Int64Attribute{
|
"page": schema.Int32Attribute{
|
||||||
Computed: true,
|
Computed: true,
|
||||||
},
|
},
|
||||||
"size": schema.Int64Attribute{
|
"size": schema.Int32Attribute{
|
||||||
Computed: true,
|
Computed: true,
|
||||||
},
|
},
|
||||||
"sort": schema.StringAttribute{
|
"sort": schema.StringAttribute{
|
||||||
Computed: true,
|
Computed: true,
|
||||||
},
|
},
|
||||||
"total_pages": schema.Int64Attribute{
|
"total_pages": schema.Int32Attribute{
|
||||||
Computed: true,
|
Computed: true,
|
||||||
},
|
},
|
||||||
"total_rows": schema.Int64Attribute{
|
"total_rows": schema.Int32Attribute{
|
||||||
Computed: true,
|
Computed: true,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
|
@ -138,7 +138,7 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
|
||||||
),
|
),
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
"size": schema.Int64Attribute{
|
"size": schema.Int32Attribute{
|
||||||
Optional: true,
|
Optional: true,
|
||||||
Computed: true,
|
Computed: true,
|
||||||
Description: "Number of items to be returned on each page.",
|
Description: "Number of items to be returned on each page.",
|
||||||
|
|
@ -178,11 +178,11 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
|
||||||
|
|
||||||
type FlavorsModel struct {
|
type FlavorsModel struct {
|
||||||
Flavors types.List `tfsdk:"flavors"`
|
Flavors types.List `tfsdk:"flavors"`
|
||||||
Page types.Int64 `tfsdk:"page"`
|
Page types.Int32 `tfsdk:"page"`
|
||||||
Pagination PaginationValue `tfsdk:"pagination"`
|
Pagination PaginationValue `tfsdk:"pagination"`
|
||||||
ProjectId types.String `tfsdk:"project_id"`
|
ProjectId types.String `tfsdk:"project_id"`
|
||||||
Region types.String `tfsdk:"region"`
|
Region types.String `tfsdk:"region"`
|
||||||
Size types.Int64 `tfsdk:"size"`
|
Size types.Int32 `tfsdk:"size"`
|
||||||
Sort types.String `tfsdk:"sort"`
|
Sort types.String `tfsdk:"sort"`
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -221,12 +221,12 @@ func (t FlavorsType) ValueFromObject(ctx context.Context, in basetypes.ObjectVal
|
||||||
return nil, diags
|
return nil, diags
|
||||||
}
|
}
|
||||||
|
|
||||||
cpuVal, ok := cpuAttribute.(basetypes.Int64Value)
|
cpuVal, ok := cpuAttribute.(basetypes.Int32Value)
|
||||||
|
|
||||||
if !ok {
|
if !ok {
|
||||||
diags.AddError(
|
diags.AddError(
|
||||||
"Attribute Wrong Type",
|
"Attribute Wrong Type",
|
||||||
fmt.Sprintf(`cpu expected to be basetypes.Int64Value, was: %T`, cpuAttribute))
|
fmt.Sprintf(`cpu expected to be basetypes.Int32Value, was: %T`, cpuAttribute))
|
||||||
}
|
}
|
||||||
|
|
||||||
descriptionAttribute, ok := attributes["description"]
|
descriptionAttribute, ok := attributes["description"]
|
||||||
|
|
@ -275,12 +275,12 @@ func (t FlavorsType) ValueFromObject(ctx context.Context, in basetypes.ObjectVal
|
||||||
return nil, diags
|
return nil, diags
|
||||||
}
|
}
|
||||||
|
|
||||||
maxGbVal, ok := maxGbAttribute.(basetypes.Int64Value)
|
maxGbVal, ok := maxGbAttribute.(basetypes.Int32Value)
|
||||||
|
|
||||||
if !ok {
|
if !ok {
|
||||||
diags.AddError(
|
diags.AddError(
|
||||||
"Attribute Wrong Type",
|
"Attribute Wrong Type",
|
||||||
fmt.Sprintf(`max_gb expected to be basetypes.Int64Value, was: %T`, maxGbAttribute))
|
fmt.Sprintf(`max_gb expected to be basetypes.Int32Value, was: %T`, maxGbAttribute))
|
||||||
}
|
}
|
||||||
|
|
||||||
memoryAttribute, ok := attributes["memory"]
|
memoryAttribute, ok := attributes["memory"]
|
||||||
|
|
@ -293,12 +293,12 @@ func (t FlavorsType) ValueFromObject(ctx context.Context, in basetypes.ObjectVal
|
||||||
return nil, diags
|
return nil, diags
|
||||||
}
|
}
|
||||||
|
|
||||||
memoryVal, ok := memoryAttribute.(basetypes.Int64Value)
|
memoryVal, ok := memoryAttribute.(basetypes.Int32Value)
|
||||||
|
|
||||||
if !ok {
|
if !ok {
|
||||||
diags.AddError(
|
diags.AddError(
|
||||||
"Attribute Wrong Type",
|
"Attribute Wrong Type",
|
||||||
fmt.Sprintf(`memory expected to be basetypes.Int64Value, was: %T`, memoryAttribute))
|
fmt.Sprintf(`memory expected to be basetypes.Int32Value, was: %T`, memoryAttribute))
|
||||||
}
|
}
|
||||||
|
|
||||||
minGbAttribute, ok := attributes["min_gb"]
|
minGbAttribute, ok := attributes["min_gb"]
|
||||||
|
|
@ -311,12 +311,12 @@ func (t FlavorsType) ValueFromObject(ctx context.Context, in basetypes.ObjectVal
|
||||||
return nil, diags
|
return nil, diags
|
||||||
}
|
}
|
||||||
|
|
||||||
minGbVal, ok := minGbAttribute.(basetypes.Int64Value)
|
minGbVal, ok := minGbAttribute.(basetypes.Int32Value)
|
||||||
|
|
||||||
if !ok {
|
if !ok {
|
||||||
diags.AddError(
|
diags.AddError(
|
||||||
"Attribute Wrong Type",
|
"Attribute Wrong Type",
|
||||||
fmt.Sprintf(`min_gb expected to be basetypes.Int64Value, was: %T`, minGbAttribute))
|
fmt.Sprintf(`min_gb expected to be basetypes.Int32Value, was: %T`, minGbAttribute))
|
||||||
}
|
}
|
||||||
|
|
||||||
nodeTypeAttribute, ok := attributes["node_type"]
|
nodeTypeAttribute, ok := attributes["node_type"]
|
||||||
|
|
@ -445,12 +445,12 @@ func NewFlavorsValue(attributeTypes map[string]attr.Type, attributes map[string]
|
||||||
return NewFlavorsValueUnknown(), diags
|
return NewFlavorsValueUnknown(), diags
|
||||||
}
|
}
|
||||||
|
|
||||||
cpuVal, ok := cpuAttribute.(basetypes.Int64Value)
|
cpuVal, ok := cpuAttribute.(basetypes.Int32Value)
|
||||||
|
|
||||||
if !ok {
|
if !ok {
|
||||||
diags.AddError(
|
diags.AddError(
|
||||||
"Attribute Wrong Type",
|
"Attribute Wrong Type",
|
||||||
fmt.Sprintf(`cpu expected to be basetypes.Int64Value, was: %T`, cpuAttribute))
|
fmt.Sprintf(`cpu expected to be basetypes.Int32Value, was: %T`, cpuAttribute))
|
||||||
}
|
}
|
||||||
|
|
||||||
descriptionAttribute, ok := attributes["description"]
|
descriptionAttribute, ok := attributes["description"]
|
||||||
|
|
@ -499,12 +499,12 @@ func NewFlavorsValue(attributeTypes map[string]attr.Type, attributes map[string]
|
||||||
return NewFlavorsValueUnknown(), diags
|
return NewFlavorsValueUnknown(), diags
|
||||||
}
|
}
|
||||||
|
|
||||||
maxGbVal, ok := maxGbAttribute.(basetypes.Int64Value)
|
maxGbVal, ok := maxGbAttribute.(basetypes.Int32Value)
|
||||||
|
|
||||||
if !ok {
|
if !ok {
|
||||||
diags.AddError(
|
diags.AddError(
|
||||||
"Attribute Wrong Type",
|
"Attribute Wrong Type",
|
||||||
fmt.Sprintf(`max_gb expected to be basetypes.Int64Value, was: %T`, maxGbAttribute))
|
fmt.Sprintf(`max_gb expected to be basetypes.Int32Value, was: %T`, maxGbAttribute))
|
||||||
}
|
}
|
||||||
|
|
||||||
memoryAttribute, ok := attributes["memory"]
|
memoryAttribute, ok := attributes["memory"]
|
||||||
|
|
@ -517,12 +517,12 @@ func NewFlavorsValue(attributeTypes map[string]attr.Type, attributes map[string]
|
||||||
return NewFlavorsValueUnknown(), diags
|
return NewFlavorsValueUnknown(), diags
|
||||||
}
|
}
|
||||||
|
|
||||||
memoryVal, ok := memoryAttribute.(basetypes.Int64Value)
|
memoryVal, ok := memoryAttribute.(basetypes.Int32Value)
|
||||||
|
|
||||||
if !ok {
|
if !ok {
|
||||||
diags.AddError(
|
diags.AddError(
|
||||||
"Attribute Wrong Type",
|
"Attribute Wrong Type",
|
||||||
fmt.Sprintf(`memory expected to be basetypes.Int64Value, was: %T`, memoryAttribute))
|
fmt.Sprintf(`memory expected to be basetypes.Int32Value, was: %T`, memoryAttribute))
|
||||||
}
|
}
|
||||||
|
|
||||||
minGbAttribute, ok := attributes["min_gb"]
|
minGbAttribute, ok := attributes["min_gb"]
|
||||||
|
|
@ -535,12 +535,12 @@ func NewFlavorsValue(attributeTypes map[string]attr.Type, attributes map[string]
|
||||||
return NewFlavorsValueUnknown(), diags
|
return NewFlavorsValueUnknown(), diags
|
||||||
}
|
}
|
||||||
|
|
||||||
minGbVal, ok := minGbAttribute.(basetypes.Int64Value)
|
minGbVal, ok := minGbAttribute.(basetypes.Int32Value)
|
||||||
|
|
||||||
if !ok {
|
if !ok {
|
||||||
diags.AddError(
|
diags.AddError(
|
||||||
"Attribute Wrong Type",
|
"Attribute Wrong Type",
|
||||||
fmt.Sprintf(`min_gb expected to be basetypes.Int64Value, was: %T`, minGbAttribute))
|
fmt.Sprintf(`min_gb expected to be basetypes.Int32Value, was: %T`, minGbAttribute))
|
||||||
}
|
}
|
||||||
|
|
||||||
nodeTypeAttribute, ok := attributes["node_type"]
|
nodeTypeAttribute, ok := attributes["node_type"]
|
||||||
|
|
@ -664,12 +664,12 @@ func (t FlavorsType) ValueType(ctx context.Context) attr.Value {
|
||||||
var _ basetypes.ObjectValuable = FlavorsValue{}
|
var _ basetypes.ObjectValuable = FlavorsValue{}
|
||||||
|
|
||||||
type FlavorsValue struct {
|
type FlavorsValue struct {
|
||||||
Cpu basetypes.Int64Value `tfsdk:"cpu"`
|
Cpu basetypes.Int32Value `tfsdk:"cpu"`
|
||||||
Description basetypes.StringValue `tfsdk:"description"`
|
Description basetypes.StringValue `tfsdk:"description"`
|
||||||
Id basetypes.StringValue `tfsdk:"id"`
|
Id basetypes.StringValue `tfsdk:"id"`
|
||||||
MaxGb basetypes.Int64Value `tfsdk:"max_gb"`
|
MaxGb basetypes.Int32Value `tfsdk:"max_gb"`
|
||||||
Memory basetypes.Int64Value `tfsdk:"memory"`
|
Memory basetypes.Int32Value `tfsdk:"memory"`
|
||||||
MinGb basetypes.Int64Value `tfsdk:"min_gb"`
|
MinGb basetypes.Int32Value `tfsdk:"min_gb"`
|
||||||
NodeType basetypes.StringValue `tfsdk:"node_type"`
|
NodeType basetypes.StringValue `tfsdk:"node_type"`
|
||||||
StorageClasses basetypes.ListValue `tfsdk:"storage_classes"`
|
StorageClasses basetypes.ListValue `tfsdk:"storage_classes"`
|
||||||
state attr.ValueState
|
state attr.ValueState
|
||||||
|
|
@ -681,12 +681,12 @@ func (v FlavorsValue) ToTerraformValue(ctx context.Context) (tftypes.Value, erro
|
||||||
var val tftypes.Value
|
var val tftypes.Value
|
||||||
var err error
|
var err error
|
||||||
|
|
||||||
attrTypes["cpu"] = basetypes.Int64Type{}.TerraformType(ctx)
|
attrTypes["cpu"] = basetypes.Int32Type{}.TerraformType(ctx)
|
||||||
attrTypes["description"] = basetypes.StringType{}.TerraformType(ctx)
|
attrTypes["description"] = basetypes.StringType{}.TerraformType(ctx)
|
||||||
attrTypes["id"] = basetypes.StringType{}.TerraformType(ctx)
|
attrTypes["id"] = basetypes.StringType{}.TerraformType(ctx)
|
||||||
attrTypes["max_gb"] = basetypes.Int64Type{}.TerraformType(ctx)
|
attrTypes["max_gb"] = basetypes.Int32Type{}.TerraformType(ctx)
|
||||||
attrTypes["memory"] = basetypes.Int64Type{}.TerraformType(ctx)
|
attrTypes["memory"] = basetypes.Int32Type{}.TerraformType(ctx)
|
||||||
attrTypes["min_gb"] = basetypes.Int64Type{}.TerraformType(ctx)
|
attrTypes["min_gb"] = basetypes.Int32Type{}.TerraformType(ctx)
|
||||||
attrTypes["node_type"] = basetypes.StringType{}.TerraformType(ctx)
|
attrTypes["node_type"] = basetypes.StringType{}.TerraformType(ctx)
|
||||||
attrTypes["storage_classes"] = basetypes.ListType{
|
attrTypes["storage_classes"] = basetypes.ListType{
|
||||||
ElemType: StorageClassesValue{}.Type(ctx),
|
ElemType: StorageClassesValue{}.Type(ctx),
|
||||||
|
|
@ -821,12 +821,12 @@ func (v FlavorsValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue,
|
||||||
}
|
}
|
||||||
|
|
||||||
attributeTypes := map[string]attr.Type{
|
attributeTypes := map[string]attr.Type{
|
||||||
"cpu": basetypes.Int64Type{},
|
"cpu": basetypes.Int32Type{},
|
||||||
"description": basetypes.StringType{},
|
"description": basetypes.StringType{},
|
||||||
"id": basetypes.StringType{},
|
"id": basetypes.StringType{},
|
||||||
"max_gb": basetypes.Int64Type{},
|
"max_gb": basetypes.Int32Type{},
|
||||||
"memory": basetypes.Int64Type{},
|
"memory": basetypes.Int32Type{},
|
||||||
"min_gb": basetypes.Int64Type{},
|
"min_gb": basetypes.Int32Type{},
|
||||||
"node_type": basetypes.StringType{},
|
"node_type": basetypes.StringType{},
|
||||||
"storage_classes": basetypes.ListType{
|
"storage_classes": basetypes.ListType{
|
||||||
ElemType: StorageClassesValue{}.Type(ctx),
|
ElemType: StorageClassesValue{}.Type(ctx),
|
||||||
|
|
@ -917,12 +917,12 @@ func (v FlavorsValue) Type(ctx context.Context) attr.Type {
|
||||||
|
|
||||||
func (v FlavorsValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
|
func (v FlavorsValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
|
||||||
return map[string]attr.Type{
|
return map[string]attr.Type{
|
||||||
"cpu": basetypes.Int64Type{},
|
"cpu": basetypes.Int32Type{},
|
||||||
"description": basetypes.StringType{},
|
"description": basetypes.StringType{},
|
||||||
"id": basetypes.StringType{},
|
"id": basetypes.StringType{},
|
||||||
"max_gb": basetypes.Int64Type{},
|
"max_gb": basetypes.Int32Type{},
|
||||||
"memory": basetypes.Int64Type{},
|
"memory": basetypes.Int32Type{},
|
||||||
"min_gb": basetypes.Int64Type{},
|
"min_gb": basetypes.Int32Type{},
|
||||||
"node_type": basetypes.StringType{},
|
"node_type": basetypes.StringType{},
|
||||||
"storage_classes": basetypes.ListType{
|
"storage_classes": basetypes.ListType{
|
||||||
ElemType: StorageClassesValue{}.Type(ctx),
|
ElemType: StorageClassesValue{}.Type(ctx),
|
||||||
|
|
@ -983,12 +983,12 @@ func (t StorageClassesType) ValueFromObject(ctx context.Context, in basetypes.Ob
|
||||||
return nil, diags
|
return nil, diags
|
||||||
}
|
}
|
||||||
|
|
||||||
maxIoPerSecVal, ok := maxIoPerSecAttribute.(basetypes.Int64Value)
|
maxIoPerSecVal, ok := maxIoPerSecAttribute.(basetypes.Int32Value)
|
||||||
|
|
||||||
if !ok {
|
if !ok {
|
||||||
diags.AddError(
|
diags.AddError(
|
||||||
"Attribute Wrong Type",
|
"Attribute Wrong Type",
|
||||||
fmt.Sprintf(`max_io_per_sec expected to be basetypes.Int64Value, was: %T`, maxIoPerSecAttribute))
|
fmt.Sprintf(`max_io_per_sec expected to be basetypes.Int32Value, was: %T`, maxIoPerSecAttribute))
|
||||||
}
|
}
|
||||||
|
|
||||||
maxThroughInMbAttribute, ok := attributes["max_through_in_mb"]
|
maxThroughInMbAttribute, ok := attributes["max_through_in_mb"]
|
||||||
|
|
@ -1001,12 +1001,12 @@ func (t StorageClassesType) ValueFromObject(ctx context.Context, in basetypes.Ob
|
||||||
return nil, diags
|
return nil, diags
|
||||||
}
|
}
|
||||||
|
|
||||||
maxThroughInMbVal, ok := maxThroughInMbAttribute.(basetypes.Int64Value)
|
maxThroughInMbVal, ok := maxThroughInMbAttribute.(basetypes.Int32Value)
|
||||||
|
|
||||||
if !ok {
|
if !ok {
|
||||||
diags.AddError(
|
diags.AddError(
|
||||||
"Attribute Wrong Type",
|
"Attribute Wrong Type",
|
||||||
fmt.Sprintf(`max_through_in_mb expected to be basetypes.Int64Value, was: %T`, maxThroughInMbAttribute))
|
fmt.Sprintf(`max_through_in_mb expected to be basetypes.Int32Value, was: %T`, maxThroughInMbAttribute))
|
||||||
}
|
}
|
||||||
|
|
||||||
if diags.HasError() {
|
if diags.HasError() {
|
||||||
|
|
@ -1112,12 +1112,12 @@ func NewStorageClassesValue(attributeTypes map[string]attr.Type, attributes map[
|
||||||
return NewStorageClassesValueUnknown(), diags
|
return NewStorageClassesValueUnknown(), diags
|
||||||
}
|
}
|
||||||
|
|
||||||
maxIoPerSecVal, ok := maxIoPerSecAttribute.(basetypes.Int64Value)
|
maxIoPerSecVal, ok := maxIoPerSecAttribute.(basetypes.Int32Value)
|
||||||
|
|
||||||
if !ok {
|
if !ok {
|
||||||
diags.AddError(
|
diags.AddError(
|
||||||
"Attribute Wrong Type",
|
"Attribute Wrong Type",
|
||||||
fmt.Sprintf(`max_io_per_sec expected to be basetypes.Int64Value, was: %T`, maxIoPerSecAttribute))
|
fmt.Sprintf(`max_io_per_sec expected to be basetypes.Int32Value, was: %T`, maxIoPerSecAttribute))
|
||||||
}
|
}
|
||||||
|
|
||||||
maxThroughInMbAttribute, ok := attributes["max_through_in_mb"]
|
maxThroughInMbAttribute, ok := attributes["max_through_in_mb"]
|
||||||
|
|
@ -1130,12 +1130,12 @@ func NewStorageClassesValue(attributeTypes map[string]attr.Type, attributes map[
|
||||||
return NewStorageClassesValueUnknown(), diags
|
return NewStorageClassesValueUnknown(), diags
|
||||||
}
|
}
|
||||||
|
|
||||||
maxThroughInMbVal, ok := maxThroughInMbAttribute.(basetypes.Int64Value)
|
maxThroughInMbVal, ok := maxThroughInMbAttribute.(basetypes.Int32Value)
|
||||||
|
|
||||||
if !ok {
|
if !ok {
|
||||||
diags.AddError(
|
diags.AddError(
|
||||||
"Attribute Wrong Type",
|
"Attribute Wrong Type",
|
||||||
fmt.Sprintf(`max_through_in_mb expected to be basetypes.Int64Value, was: %T`, maxThroughInMbAttribute))
|
fmt.Sprintf(`max_through_in_mb expected to be basetypes.Int32Value, was: %T`, maxThroughInMbAttribute))
|
||||||
}
|
}
|
||||||
|
|
||||||
if diags.HasError() {
|
if diags.HasError() {
|
||||||
|
|
@ -1219,8 +1219,8 @@ var _ basetypes.ObjectValuable = StorageClassesValue{}
|
||||||
|
|
||||||
type StorageClassesValue struct {
|
type StorageClassesValue struct {
|
||||||
Class basetypes.StringValue `tfsdk:"class"`
|
Class basetypes.StringValue `tfsdk:"class"`
|
||||||
MaxIoPerSec basetypes.Int64Value `tfsdk:"max_io_per_sec"`
|
MaxIoPerSec basetypes.Int32Value `tfsdk:"max_io_per_sec"`
|
||||||
MaxThroughInMb basetypes.Int64Value `tfsdk:"max_through_in_mb"`
|
MaxThroughInMb basetypes.Int32Value `tfsdk:"max_through_in_mb"`
|
||||||
state attr.ValueState
|
state attr.ValueState
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -1231,8 +1231,8 @@ func (v StorageClassesValue) ToTerraformValue(ctx context.Context) (tftypes.Valu
|
||||||
var err error
|
var err error
|
||||||
|
|
||||||
attrTypes["class"] = basetypes.StringType{}.TerraformType(ctx)
|
attrTypes["class"] = basetypes.StringType{}.TerraformType(ctx)
|
||||||
attrTypes["max_io_per_sec"] = basetypes.Int64Type{}.TerraformType(ctx)
|
attrTypes["max_io_per_sec"] = basetypes.Int32Type{}.TerraformType(ctx)
|
||||||
attrTypes["max_through_in_mb"] = basetypes.Int64Type{}.TerraformType(ctx)
|
attrTypes["max_through_in_mb"] = basetypes.Int32Type{}.TerraformType(ctx)
|
||||||
|
|
||||||
objectType := tftypes.Object{AttributeTypes: attrTypes}
|
objectType := tftypes.Object{AttributeTypes: attrTypes}
|
||||||
|
|
||||||
|
|
@ -1295,8 +1295,8 @@ func (v StorageClassesValue) ToObjectValue(ctx context.Context) (basetypes.Objec
|
||||||
|
|
||||||
attributeTypes := map[string]attr.Type{
|
attributeTypes := map[string]attr.Type{
|
||||||
"class": basetypes.StringType{},
|
"class": basetypes.StringType{},
|
||||||
"max_io_per_sec": basetypes.Int64Type{},
|
"max_io_per_sec": basetypes.Int32Type{},
|
||||||
"max_through_in_mb": basetypes.Int64Type{},
|
"max_through_in_mb": basetypes.Int32Type{},
|
||||||
}
|
}
|
||||||
|
|
||||||
if v.IsNull() {
|
if v.IsNull() {
|
||||||
|
|
@ -1359,8 +1359,8 @@ func (v StorageClassesValue) Type(ctx context.Context) attr.Type {
|
||||||
func (v StorageClassesValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
|
func (v StorageClassesValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
|
||||||
return map[string]attr.Type{
|
return map[string]attr.Type{
|
||||||
"class": basetypes.StringType{},
|
"class": basetypes.StringType{},
|
||||||
"max_io_per_sec": basetypes.Int64Type{},
|
"max_io_per_sec": basetypes.Int32Type{},
|
||||||
"max_through_in_mb": basetypes.Int64Type{},
|
"max_through_in_mb": basetypes.Int32Type{},
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -1399,12 +1399,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
|
||||||
return nil, diags
|
return nil, diags
|
||||||
}
|
}
|
||||||
|
|
||||||
pageVal, ok := pageAttribute.(basetypes.Int64Value)
|
pageVal, ok := pageAttribute.(basetypes.Int32Value)
|
||||||
|
|
||||||
if !ok {
|
if !ok {
|
||||||
diags.AddError(
|
diags.AddError(
|
||||||
"Attribute Wrong Type",
|
"Attribute Wrong Type",
|
||||||
fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute))
|
fmt.Sprintf(`page expected to be basetypes.Int32Value, was: %T`, pageAttribute))
|
||||||
}
|
}
|
||||||
|
|
||||||
sizeAttribute, ok := attributes["size"]
|
sizeAttribute, ok := attributes["size"]
|
||||||
|
|
@ -1417,12 +1417,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
|
||||||
return nil, diags
|
return nil, diags
|
||||||
}
|
}
|
||||||
|
|
||||||
sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
|
sizeVal, ok := sizeAttribute.(basetypes.Int32Value)
|
||||||
|
|
||||||
if !ok {
|
if !ok {
|
||||||
diags.AddError(
|
diags.AddError(
|
||||||
"Attribute Wrong Type",
|
"Attribute Wrong Type",
|
||||||
fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
|
fmt.Sprintf(`size expected to be basetypes.Int32Value, was: %T`, sizeAttribute))
|
||||||
}
|
}
|
||||||
|
|
||||||
sortAttribute, ok := attributes["sort"]
|
sortAttribute, ok := attributes["sort"]
|
||||||
|
|
@ -1453,12 +1453,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
|
||||||
return nil, diags
|
return nil, diags
|
||||||
}
|
}
|
||||||
|
|
||||||
totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value)
|
totalPagesVal, ok := totalPagesAttribute.(basetypes.Int32Value)
|
||||||
|
|
||||||
if !ok {
|
if !ok {
|
||||||
diags.AddError(
|
diags.AddError(
|
||||||
"Attribute Wrong Type",
|
"Attribute Wrong Type",
|
||||||
fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute))
|
fmt.Sprintf(`total_pages expected to be basetypes.Int32Value, was: %T`, totalPagesAttribute))
|
||||||
}
|
}
|
||||||
|
|
||||||
totalRowsAttribute, ok := attributes["total_rows"]
|
totalRowsAttribute, ok := attributes["total_rows"]
|
||||||
|
|
@ -1471,12 +1471,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
|
||||||
return nil, diags
|
return nil, diags
|
||||||
}
|
}
|
||||||
|
|
||||||
totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value)
|
totalRowsVal, ok := totalRowsAttribute.(basetypes.Int32Value)
|
||||||
|
|
||||||
if !ok {
|
if !ok {
|
||||||
diags.AddError(
|
diags.AddError(
|
||||||
"Attribute Wrong Type",
|
"Attribute Wrong Type",
|
||||||
fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute))
|
fmt.Sprintf(`total_rows expected to be basetypes.Int32Value, was: %T`, totalRowsAttribute))
|
||||||
}
|
}
|
||||||
|
|
||||||
if diags.HasError() {
|
if diags.HasError() {
|
||||||
|
|
@ -1566,12 +1566,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
|
||||||
return NewPaginationValueUnknown(), diags
|
return NewPaginationValueUnknown(), diags
|
||||||
}
|
}
|
||||||
|
|
||||||
pageVal, ok := pageAttribute.(basetypes.Int64Value)
|
pageVal, ok := pageAttribute.(basetypes.Int32Value)
|
||||||
|
|
||||||
if !ok {
|
if !ok {
|
||||||
diags.AddError(
|
diags.AddError(
|
||||||
"Attribute Wrong Type",
|
"Attribute Wrong Type",
|
||||||
fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute))
|
fmt.Sprintf(`page expected to be basetypes.Int32Value, was: %T`, pageAttribute))
|
||||||
}
|
}
|
||||||
|
|
||||||
sizeAttribute, ok := attributes["size"]
|
sizeAttribute, ok := attributes["size"]
|
||||||
|
|
@ -1584,12 +1584,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
|
||||||
return NewPaginationValueUnknown(), diags
|
return NewPaginationValueUnknown(), diags
|
||||||
}
|
}
|
||||||
|
|
||||||
sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
|
sizeVal, ok := sizeAttribute.(basetypes.Int32Value)
|
||||||
|
|
||||||
if !ok {
|
if !ok {
|
||||||
diags.AddError(
|
diags.AddError(
|
||||||
"Attribute Wrong Type",
|
"Attribute Wrong Type",
|
||||||
fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
|
fmt.Sprintf(`size expected to be basetypes.Int32Value, was: %T`, sizeAttribute))
|
||||||
}
|
}
|
||||||
|
|
||||||
sortAttribute, ok := attributes["sort"]
|
sortAttribute, ok := attributes["sort"]
|
||||||
|
|
@ -1620,12 +1620,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
|
||||||
return NewPaginationValueUnknown(), diags
|
return NewPaginationValueUnknown(), diags
|
||||||
}
|
}
|
||||||
|
|
||||||
totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value)
|
totalPagesVal, ok := totalPagesAttribute.(basetypes.Int32Value)
|
||||||
|
|
||||||
if !ok {
|
if !ok {
|
||||||
diags.AddError(
|
diags.AddError(
|
||||||
"Attribute Wrong Type",
|
"Attribute Wrong Type",
|
||||||
fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute))
|
fmt.Sprintf(`total_pages expected to be basetypes.Int32Value, was: %T`, totalPagesAttribute))
|
||||||
}
|
}
|
||||||
|
|
||||||
totalRowsAttribute, ok := attributes["total_rows"]
|
totalRowsAttribute, ok := attributes["total_rows"]
|
||||||
|
|
@ -1638,12 +1638,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
|
||||||
return NewPaginationValueUnknown(), diags
|
return NewPaginationValueUnknown(), diags
|
||||||
}
|
}
|
||||||
|
|
||||||
totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value)
|
totalRowsVal, ok := totalRowsAttribute.(basetypes.Int32Value)
|
||||||
|
|
||||||
if !ok {
|
if !ok {
|
||||||
diags.AddError(
|
diags.AddError(
|
||||||
"Attribute Wrong Type",
|
"Attribute Wrong Type",
|
||||||
fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute))
|
fmt.Sprintf(`total_rows expected to be basetypes.Int32Value, was: %T`, totalRowsAttribute))
|
||||||
}
|
}
|
||||||
|
|
||||||
if diags.HasError() {
|
if diags.HasError() {
|
||||||
|
|
@ -1728,11 +1728,11 @@ func (t PaginationType) ValueType(ctx context.Context) attr.Value {
|
||||||
var _ basetypes.ObjectValuable = PaginationValue{}
|
var _ basetypes.ObjectValuable = PaginationValue{}
|
||||||
|
|
||||||
type PaginationValue struct {
|
type PaginationValue struct {
|
||||||
Page basetypes.Int64Value `tfsdk:"page"`
|
Page basetypes.Int32Value `tfsdk:"page"`
|
||||||
Size basetypes.Int64Value `tfsdk:"size"`
|
Size basetypes.Int32Value `tfsdk:"size"`
|
||||||
Sort basetypes.StringValue `tfsdk:"sort"`
|
Sort basetypes.StringValue `tfsdk:"sort"`
|
||||||
TotalPages basetypes.Int64Value `tfsdk:"total_pages"`
|
TotalPages basetypes.Int32Value `tfsdk:"total_pages"`
|
||||||
TotalRows basetypes.Int64Value `tfsdk:"total_rows"`
|
TotalRows basetypes.Int32Value `tfsdk:"total_rows"`
|
||||||
state attr.ValueState
|
state attr.ValueState
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -1742,11 +1742,11 @@ func (v PaginationValue) ToTerraformValue(ctx context.Context) (tftypes.Value, e
|
||||||
var val tftypes.Value
|
var val tftypes.Value
|
||||||
var err error
|
var err error
|
||||||
|
|
||||||
attrTypes["page"] = basetypes.Int64Type{}.TerraformType(ctx)
|
attrTypes["page"] = basetypes.Int32Type{}.TerraformType(ctx)
|
||||||
attrTypes["size"] = basetypes.Int64Type{}.TerraformType(ctx)
|
attrTypes["size"] = basetypes.Int32Type{}.TerraformType(ctx)
|
||||||
attrTypes["sort"] = basetypes.StringType{}.TerraformType(ctx)
|
attrTypes["sort"] = basetypes.StringType{}.TerraformType(ctx)
|
||||||
attrTypes["total_pages"] = basetypes.Int64Type{}.TerraformType(ctx)
|
attrTypes["total_pages"] = basetypes.Int32Type{}.TerraformType(ctx)
|
||||||
attrTypes["total_rows"] = basetypes.Int64Type{}.TerraformType(ctx)
|
attrTypes["total_rows"] = basetypes.Int32Type{}.TerraformType(ctx)
|
||||||
|
|
||||||
objectType := tftypes.Object{AttributeTypes: attrTypes}
|
objectType := tftypes.Object{AttributeTypes: attrTypes}
|
||||||
|
|
||||||
|
|
@ -1824,11 +1824,11 @@ func (v PaginationValue) ToObjectValue(ctx context.Context) (basetypes.ObjectVal
|
||||||
var diags diag.Diagnostics
|
var diags diag.Diagnostics
|
||||||
|
|
||||||
attributeTypes := map[string]attr.Type{
|
attributeTypes := map[string]attr.Type{
|
||||||
"page": basetypes.Int64Type{},
|
"page": basetypes.Int32Type{},
|
||||||
"size": basetypes.Int64Type{},
|
"size": basetypes.Int32Type{},
|
||||||
"sort": basetypes.StringType{},
|
"sort": basetypes.StringType{},
|
||||||
"total_pages": basetypes.Int64Type{},
|
"total_pages": basetypes.Int32Type{},
|
||||||
"total_rows": basetypes.Int64Type{},
|
"total_rows": basetypes.Int32Type{},
|
||||||
}
|
}
|
||||||
|
|
||||||
if v.IsNull() {
|
if v.IsNull() {
|
||||||
|
|
@ -1900,10 +1900,10 @@ func (v PaginationValue) Type(ctx context.Context) attr.Type {
|
||||||
|
|
||||||
func (v PaginationValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
|
func (v PaginationValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
|
||||||
return map[string]attr.Type{
|
return map[string]attr.Type{
|
||||||
"page": basetypes.Int64Type{},
|
"page": basetypes.Int32Type{},
|
||||||
"size": basetypes.Int64Type{},
|
"size": basetypes.Int32Type{},
|
||||||
"sort": basetypes.StringType{},
|
"sort": basetypes.StringType{},
|
||||||
"total_pages": basetypes.Int64Type{},
|
"total_pages": basetypes.Int32Type{},
|
||||||
"total_rows": basetypes.Int64Type{},
|
"total_rows": basetypes.Int32Type{},
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -4,21 +4,21 @@ import (
|
||||||
"context"
|
"context"
|
||||||
"fmt"
|
"fmt"
|
||||||
|
|
||||||
postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
|
"github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
|
||||||
)
|
)
|
||||||
|
|
||||||
type flavorsClientReader interface {
|
type flavorsClientReader interface {
|
||||||
GetFlavorsRequest(
|
GetFlavorsRequest(
|
||||||
ctx context.Context,
|
ctx context.Context,
|
||||||
projectId, region string,
|
projectId, region string,
|
||||||
) postgresflex.ApiGetFlavorsRequestRequest
|
) v3alpha1api.ApiGetFlavorsRequestRequest
|
||||||
}
|
}
|
||||||
|
|
||||||
func getAllFlavors(ctx context.Context, client flavorsClientReader, projectId, region string) (
|
func getAllFlavors(ctx context.Context, client flavorsClientReader, projectId, region string) (
|
||||||
[]postgresflex.ListFlavors,
|
[]v3alpha1api.ListFlavors,
|
||||||
error,
|
error,
|
||||||
) {
|
) {
|
||||||
getAllFilter := func(_ postgresflex.ListFlavors) bool { return true }
|
getAllFilter := func(_ v3alpha1api.ListFlavors) bool { return true }
|
||||||
flavorList, err := getFlavorsByFilter(ctx, client, projectId, region, getAllFilter)
|
flavorList, err := getFlavorsByFilter(ctx, client, projectId, region, getAllFilter)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
|
|
@ -32,29 +32,29 @@ func getFlavorsByFilter(
|
||||||
ctx context.Context,
|
ctx context.Context,
|
||||||
client flavorsClientReader,
|
client flavorsClientReader,
|
||||||
projectId, region string,
|
projectId, region string,
|
||||||
filter func(db postgresflex.ListFlavors) bool,
|
filter func(db v3alpha1api.ListFlavors) bool,
|
||||||
) ([]postgresflex.ListFlavors, error) {
|
) ([]v3alpha1api.ListFlavors, error) {
|
||||||
if projectId == "" || region == "" {
|
if projectId == "" || region == "" {
|
||||||
return nil, fmt.Errorf("listing postgresflex flavors: projectId and region are required")
|
return nil, fmt.Errorf("listing postgresflex flavors: projectId and region are required")
|
||||||
}
|
}
|
||||||
|
|
||||||
const pageSize = 25
|
const pageSize = 25
|
||||||
|
|
||||||
var result = make([]postgresflex.ListFlavors, 0)
|
var result = make([]v3alpha1api.ListFlavors, 0)
|
||||||
|
|
||||||
for page := int32(1); ; page++ {
|
for page := int32(1); ; page++ {
|
||||||
res, err := client.GetFlavorsRequest(ctx, projectId, region).
|
res, err := client.GetFlavorsRequest(ctx, projectId, region).
|
||||||
Page(page).Size(pageSize).Sort(postgresflex.FLAVORSORT_ID_ASC).Execute()
|
Page(page).Size(pageSize).Sort(v3alpha1api.FLAVORSORT_ID_ASC).Execute()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, fmt.Errorf("requesting flavors list (page %d): %w", page, err)
|
return nil, fmt.Errorf("requesting flavors list (page %d): %w", page, err)
|
||||||
}
|
}
|
||||||
|
|
||||||
// If the API returns no flavors, we have reached the end of the list.
|
// If the API returns no flavors, we have reached the end of the list.
|
||||||
if res.Flavors == nil || len(*res.Flavors) == 0 {
|
if len(res.Flavors) == 0 {
|
||||||
break
|
break
|
||||||
}
|
}
|
||||||
|
|
||||||
for _, flavor := range *res.Flavors {
|
for _, flavor := range res.Flavors {
|
||||||
if filter(flavor) {
|
if filter(flavor) {
|
||||||
result = append(result, flavor)
|
result = append(result, flavor)
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -4,9 +4,7 @@ import (
|
||||||
"context"
|
"context"
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
"github.com/stackitcloud/stackit-sdk-go/core/utils"
|
postgresflex "github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
|
||||||
|
|
||||||
postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
type mockRequest struct {
|
type mockRequest struct {
|
||||||
|
|
@ -30,25 +28,25 @@ func (m *mockFlavorsClient) GetFlavorsRequest(_ context.Context, _, _ string) po
|
||||||
return m.executeRequest()
|
return m.executeRequest()
|
||||||
}
|
}
|
||||||
|
|
||||||
var mockResp = func(page int64) (*postgresflex.GetFlavorsResponse, error) {
|
var mockResp = func(page int32) (*postgresflex.GetFlavorsResponse, error) {
|
||||||
if page == 1 {
|
if page == 1 {
|
||||||
return &postgresflex.GetFlavorsResponse{
|
return &postgresflex.GetFlavorsResponse{
|
||||||
Flavors: &[]postgresflex.ListFlavors{
|
Flavors: []postgresflex.ListFlavors{
|
||||||
{Id: utils.Ptr("flavor-1"), Description: utils.Ptr("first")},
|
{Id: "flavor-1", Description: "first"},
|
||||||
{Id: utils.Ptr("flavor-2"), Description: utils.Ptr("second")},
|
{Id: "flavor-2", Description: "second"},
|
||||||
},
|
},
|
||||||
}, nil
|
}, nil
|
||||||
}
|
}
|
||||||
if page == 2 {
|
if page == 2 {
|
||||||
return &postgresflex.GetFlavorsResponse{
|
return &postgresflex.GetFlavorsResponse{
|
||||||
Flavors: &[]postgresflex.ListFlavors{
|
Flavors: []postgresflex.ListFlavors{
|
||||||
{Id: utils.Ptr("flavor-3"), Description: utils.Ptr("three")},
|
{Id: "flavor-3", Description: "three"},
|
||||||
},
|
},
|
||||||
}, nil
|
}, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
return &postgresflex.GetFlavorsResponse{
|
return &postgresflex.GetFlavorsResponse{
|
||||||
Flavors: &[]postgresflex.ListFlavors{},
|
Flavors: []postgresflex.ListFlavors{},
|
||||||
}, nil
|
}, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -72,7 +70,7 @@ func TestGetFlavorsByFilter(t *testing.T) {
|
||||||
{
|
{
|
||||||
description: "Success - Filter flavors by description",
|
description: "Success - Filter flavors by description",
|
||||||
projectId: "pid", region: "reg",
|
projectId: "pid", region: "reg",
|
||||||
filter: func(f postgresflex.ListFlavors) bool { return *f.Description == "first" },
|
filter: func(f postgresflex.ListFlavors) bool { return f.Description == "first" },
|
||||||
wantCount: 1,
|
wantCount: 1,
|
||||||
wantErr: false,
|
wantErr: false,
|
||||||
},
|
},
|
||||||
|
|
@ -86,10 +84,10 @@ func TestGetFlavorsByFilter(t *testing.T) {
|
||||||
for _, tt := range tests {
|
for _, tt := range tests {
|
||||||
t.Run(
|
t.Run(
|
||||||
tt.description, func(t *testing.T) {
|
tt.description, func(t *testing.T) {
|
||||||
var currentPage int64
|
var currentPage int32
|
||||||
client := &mockFlavorsClient{
|
client := &mockFlavorsClient{
|
||||||
executeRequest: func() postgresflex.ApiGetFlavorsRequestRequest {
|
executeRequest: func() postgresflex.ApiGetFlavorsRequestRequest {
|
||||||
return &mockRequest{
|
return mockRequest{
|
||||||
executeFunc: func() (*postgresflex.GetFlavorsResponse, error) {
|
executeFunc: func() (*postgresflex.GetFlavorsResponse, error) {
|
||||||
currentPage++
|
currentPage++
|
||||||
return mockResp(currentPage)
|
return mockResp(currentPage)
|
||||||
|
|
@ -113,10 +111,10 @@ func TestGetFlavorsByFilter(t *testing.T) {
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestGetAllFlavors(t *testing.T) {
|
func TestGetAllFlavors(t *testing.T) {
|
||||||
var currentPage int64
|
var currentPage int32
|
||||||
client := &mockFlavorsClient{
|
client := &mockFlavorsClient{
|
||||||
executeRequest: func() postgresflex.ApiGetFlavorsRequestRequest {
|
executeRequest: func() postgresflex.ApiGetFlavorsRequestRequest {
|
||||||
return &mockRequest{
|
return mockRequest{
|
||||||
executeFunc: func() (*postgresflex.GetFlavorsResponse, error) {
|
executeFunc: func() (*postgresflex.GetFlavorsResponse, error) {
|
||||||
currentPage++
|
currentPage++
|
||||||
return mockResp(currentPage)
|
return mockResp(currentPage)
|
||||||
|
|
|
||||||
|
|
@ -5,8 +5,8 @@ import (
|
||||||
|
|
||||||
"github.com/hashicorp/terraform-plugin-framework/datasource"
|
"github.com/hashicorp/terraform-plugin-framework/datasource"
|
||||||
"github.com/hashicorp/terraform-plugin-log/tflog"
|
"github.com/hashicorp/terraform-plugin-log/tflog"
|
||||||
|
"github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
|
||||||
|
|
||||||
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
|
|
||||||
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
|
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
|
||||||
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
|
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
|
||||||
postgresflexalphaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/flavors/datasources_gen"
|
postgresflexalphaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/flavors/datasources_gen"
|
||||||
|
|
@ -26,7 +26,7 @@ func NewFlavorsDataSource() datasource.DataSource {
|
||||||
type dataSourceModel = postgresflexalphaGen.FlavorsModel
|
type dataSourceModel = postgresflexalphaGen.FlavorsModel
|
||||||
|
|
||||||
type flavorsDataSource struct {
|
type flavorsDataSource struct {
|
||||||
client *postgresflexalpha.APIClient
|
client *v3alpha1api.APIClient
|
||||||
providerData core.ProviderData
|
providerData core.ProviderData
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -23,7 +23,7 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
|
||||||
"flavors": schema.ListNestedAttribute{
|
"flavors": schema.ListNestedAttribute{
|
||||||
NestedObject: schema.NestedAttributeObject{
|
NestedObject: schema.NestedAttributeObject{
|
||||||
Attributes: map[string]schema.Attribute{
|
Attributes: map[string]schema.Attribute{
|
||||||
"cpu": schema.Int64Attribute{
|
"cpu": schema.Int32Attribute{
|
||||||
Computed: true,
|
Computed: true,
|
||||||
Description: "The cpu count of the instance.",
|
Description: "The cpu count of the instance.",
|
||||||
MarkdownDescription: "The cpu count of the instance.",
|
MarkdownDescription: "The cpu count of the instance.",
|
||||||
|
|
@ -38,17 +38,17 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
|
||||||
Description: "The id of the instance flavor.",
|
Description: "The id of the instance flavor.",
|
||||||
MarkdownDescription: "The id of the instance flavor.",
|
MarkdownDescription: "The id of the instance flavor.",
|
||||||
},
|
},
|
||||||
"max_gb": schema.Int64Attribute{
|
"max_gb": schema.Int32Attribute{
|
||||||
Computed: true,
|
Computed: true,
|
||||||
Description: "maximum storage which can be ordered for the flavor in Gigabyte.",
|
Description: "maximum storage which can be ordered for the flavor in Gigabyte.",
|
||||||
MarkdownDescription: "maximum storage which can be ordered for the flavor in Gigabyte.",
|
MarkdownDescription: "maximum storage which can be ordered for the flavor in Gigabyte.",
|
||||||
},
|
},
|
||||||
"memory": schema.Int64Attribute{
|
"memory": schema.Int32Attribute{
|
||||||
Computed: true,
|
Computed: true,
|
||||||
Description: "The memory of the instance in Gibibyte.",
|
Description: "The memory of the instance in Gibibyte.",
|
||||||
MarkdownDescription: "The memory of the instance in Gibibyte.",
|
MarkdownDescription: "The memory of the instance in Gibibyte.",
|
||||||
},
|
},
|
||||||
"min_gb": schema.Int64Attribute{
|
"min_gb": schema.Int32Attribute{
|
||||||
Computed: true,
|
Computed: true,
|
||||||
Description: "minimum storage which is required to order in Gigabyte.",
|
Description: "minimum storage which is required to order in Gigabyte.",
|
||||||
MarkdownDescription: "minimum storage which is required to order in Gigabyte.",
|
MarkdownDescription: "minimum storage which is required to order in Gigabyte.",
|
||||||
|
|
@ -64,10 +64,10 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
|
||||||
"class": schema.StringAttribute{
|
"class": schema.StringAttribute{
|
||||||
Computed: true,
|
Computed: true,
|
||||||
},
|
},
|
||||||
"max_io_per_sec": schema.Int64Attribute{
|
"max_io_per_sec": schema.Int32Attribute{
|
||||||
Computed: true,
|
Computed: true,
|
||||||
},
|
},
|
||||||
"max_through_in_mb": schema.Int64Attribute{
|
"max_through_in_mb": schema.Int32Attribute{
|
||||||
Computed: true,
|
Computed: true,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
|
@ -92,7 +92,7 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
|
||||||
Description: "List of flavors available for the project.",
|
Description: "List of flavors available for the project.",
|
||||||
MarkdownDescription: "List of flavors available for the project.",
|
MarkdownDescription: "List of flavors available for the project.",
|
||||||
},
|
},
|
||||||
"page": schema.Int64Attribute{
|
"page": schema.Int32Attribute{
|
||||||
Optional: true,
|
Optional: true,
|
||||||
Computed: true,
|
Computed: true,
|
||||||
Description: "Number of the page of items list to be returned.",
|
Description: "Number of the page of items list to be returned.",
|
||||||
|
|
@ -100,19 +100,19 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
|
||||||
},
|
},
|
||||||
"pagination": schema.SingleNestedAttribute{
|
"pagination": schema.SingleNestedAttribute{
|
||||||
Attributes: map[string]schema.Attribute{
|
Attributes: map[string]schema.Attribute{
|
||||||
"page": schema.Int64Attribute{
|
"page": schema.Int32Attribute{
|
||||||
Computed: true,
|
Computed: true,
|
||||||
},
|
},
|
||||||
"size": schema.Int64Attribute{
|
"size": schema.Int32Attribute{
|
||||||
Computed: true,
|
Computed: true,
|
||||||
},
|
},
|
||||||
"sort": schema.StringAttribute{
|
"sort": schema.StringAttribute{
|
||||||
Computed: true,
|
Computed: true,
|
||||||
},
|
},
|
||||||
"total_pages": schema.Int64Attribute{
|
"total_pages": schema.Int32Attribute{
|
||||||
Computed: true,
|
Computed: true,
|
||||||
},
|
},
|
||||||
"total_rows": schema.Int64Attribute{
|
"total_rows": schema.Int32Attribute{
|
||||||
Computed: true,
|
Computed: true,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
|
@ -138,7 +138,7 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
|
||||||
),
|
),
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
"size": schema.Int64Attribute{
|
"size": schema.Int32Attribute{
|
||||||
Optional: true,
|
Optional: true,
|
||||||
Computed: true,
|
Computed: true,
|
||||||
Description: "Number of items to be returned on each page.",
|
Description: "Number of items to be returned on each page.",
|
||||||
|
|
@ -176,11 +176,11 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
|
||||||
|
|
||||||
type FlavorsModel struct {
|
type FlavorsModel struct {
|
||||||
Flavors types.List `tfsdk:"flavors"`
|
Flavors types.List `tfsdk:"flavors"`
|
||||||
Page types.Int64 `tfsdk:"page"`
|
Page types.Int32 `tfsdk:"page"`
|
||||||
Pagination PaginationValue `tfsdk:"pagination"`
|
Pagination PaginationValue `tfsdk:"pagination"`
|
||||||
ProjectId types.String `tfsdk:"project_id"`
|
ProjectId types.String `tfsdk:"project_id"`
|
||||||
Region types.String `tfsdk:"region"`
|
Region types.String `tfsdk:"region"`
|
||||||
Size types.Int64 `tfsdk:"size"`
|
Size types.Int32 `tfsdk:"size"`
|
||||||
Sort types.String `tfsdk:"sort"`
|
Sort types.String `tfsdk:"sort"`
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -219,12 +219,12 @@ func (t FlavorsType) ValueFromObject(ctx context.Context, in basetypes.ObjectVal
|
||||||
return nil, diags
|
return nil, diags
|
||||||
}
|
}
|
||||||
|
|
||||||
cpuVal, ok := cpuAttribute.(basetypes.Int64Value)
|
cpuVal, ok := cpuAttribute.(basetypes.Int32Value)
|
||||||
|
|
||||||
if !ok {
|
if !ok {
|
||||||
diags.AddError(
|
diags.AddError(
|
||||||
"Attribute Wrong Type",
|
"Attribute Wrong Type",
|
||||||
fmt.Sprintf(`cpu expected to be basetypes.Int64Value, was: %T`, cpuAttribute))
|
fmt.Sprintf(`cpu expected to be basetypes.Int32Value, was: %T`, cpuAttribute))
|
||||||
}
|
}
|
||||||
|
|
||||||
descriptionAttribute, ok := attributes["description"]
|
descriptionAttribute, ok := attributes["description"]
|
||||||
|
|
@ -273,12 +273,12 @@ func (t FlavorsType) ValueFromObject(ctx context.Context, in basetypes.ObjectVal
|
||||||
return nil, diags
|
return nil, diags
|
||||||
}
|
}
|
||||||
|
|
||||||
maxGbVal, ok := maxGbAttribute.(basetypes.Int64Value)
|
maxGbVal, ok := maxGbAttribute.(basetypes.Int32Value)
|
||||||
|
|
||||||
if !ok {
|
if !ok {
|
||||||
diags.AddError(
|
diags.AddError(
|
||||||
"Attribute Wrong Type",
|
"Attribute Wrong Type",
|
||||||
fmt.Sprintf(`max_gb expected to be basetypes.Int64Value, was: %T`, maxGbAttribute))
|
fmt.Sprintf(`max_gb expected to be basetypes.Int32Value, was: %T`, maxGbAttribute))
|
||||||
}
|
}
|
||||||
|
|
||||||
memoryAttribute, ok := attributes["memory"]
|
memoryAttribute, ok := attributes["memory"]
|
||||||
|
|
@ -291,12 +291,12 @@ func (t FlavorsType) ValueFromObject(ctx context.Context, in basetypes.ObjectVal
|
||||||
return nil, diags
|
return nil, diags
|
||||||
}
|
}
|
||||||
|
|
||||||
memoryVal, ok := memoryAttribute.(basetypes.Int64Value)
|
memoryVal, ok := memoryAttribute.(basetypes.Int32Value)
|
||||||
|
|
||||||
if !ok {
|
if !ok {
|
||||||
diags.AddError(
|
diags.AddError(
|
||||||
"Attribute Wrong Type",
|
"Attribute Wrong Type",
|
||||||
fmt.Sprintf(`memory expected to be basetypes.Int64Value, was: %T`, memoryAttribute))
|
fmt.Sprintf(`memory expected to be basetypes.Int32Value, was: %T`, memoryAttribute))
|
||||||
}
|
}
|
||||||
|
|
||||||
minGbAttribute, ok := attributes["min_gb"]
|
minGbAttribute, ok := attributes["min_gb"]
|
||||||
|
|
@ -309,12 +309,12 @@ func (t FlavorsType) ValueFromObject(ctx context.Context, in basetypes.ObjectVal
|
||||||
return nil, diags
|
return nil, diags
|
||||||
}
|
}
|
||||||
|
|
||||||
minGbVal, ok := minGbAttribute.(basetypes.Int64Value)
|
minGbVal, ok := minGbAttribute.(basetypes.Int32Value)
|
||||||
|
|
||||||
if !ok {
|
if !ok {
|
||||||
diags.AddError(
|
diags.AddError(
|
||||||
"Attribute Wrong Type",
|
"Attribute Wrong Type",
|
||||||
fmt.Sprintf(`min_gb expected to be basetypes.Int64Value, was: %T`, minGbAttribute))
|
fmt.Sprintf(`min_gb expected to be basetypes.Int32Value, was: %T`, minGbAttribute))
|
||||||
}
|
}
|
||||||
|
|
||||||
nodeTypeAttribute, ok := attributes["node_type"]
|
nodeTypeAttribute, ok := attributes["node_type"]
|
||||||
|
|
@ -443,12 +443,12 @@ func NewFlavorsValue(attributeTypes map[string]attr.Type, attributes map[string]
|
||||||
return NewFlavorsValueUnknown(), diags
|
return NewFlavorsValueUnknown(), diags
|
||||||
}
|
}
|
||||||
|
|
||||||
cpuVal, ok := cpuAttribute.(basetypes.Int64Value)
|
cpuVal, ok := cpuAttribute.(basetypes.Int32Value)
|
||||||
|
|
||||||
if !ok {
|
if !ok {
|
||||||
diags.AddError(
|
diags.AddError(
|
||||||
"Attribute Wrong Type",
|
"Attribute Wrong Type",
|
||||||
fmt.Sprintf(`cpu expected to be basetypes.Int64Value, was: %T`, cpuAttribute))
|
fmt.Sprintf(`cpu expected to be basetypes.Int32Value, was: %T`, cpuAttribute))
|
||||||
}
|
}
|
||||||
|
|
||||||
descriptionAttribute, ok := attributes["description"]
|
descriptionAttribute, ok := attributes["description"]
|
||||||
|
|
@ -497,12 +497,12 @@ func NewFlavorsValue(attributeTypes map[string]attr.Type, attributes map[string]
|
||||||
return NewFlavorsValueUnknown(), diags
|
return NewFlavorsValueUnknown(), diags
|
||||||
}
|
}
|
||||||
|
|
||||||
maxGbVal, ok := maxGbAttribute.(basetypes.Int64Value)
|
maxGbVal, ok := maxGbAttribute.(basetypes.Int32Value)
|
||||||
|
|
||||||
if !ok {
|
if !ok {
|
||||||
diags.AddError(
|
diags.AddError(
|
||||||
"Attribute Wrong Type",
|
"Attribute Wrong Type",
|
||||||
fmt.Sprintf(`max_gb expected to be basetypes.Int64Value, was: %T`, maxGbAttribute))
|
fmt.Sprintf(`max_gb expected to be basetypes.Int32Value, was: %T`, maxGbAttribute))
|
||||||
}
|
}
|
||||||
|
|
||||||
memoryAttribute, ok := attributes["memory"]
|
memoryAttribute, ok := attributes["memory"]
|
||||||
|
|
@ -515,12 +515,12 @@ func NewFlavorsValue(attributeTypes map[string]attr.Type, attributes map[string]
|
||||||
return NewFlavorsValueUnknown(), diags
|
return NewFlavorsValueUnknown(), diags
|
||||||
}
|
}
|
||||||
|
|
||||||
memoryVal, ok := memoryAttribute.(basetypes.Int64Value)
|
memoryVal, ok := memoryAttribute.(basetypes.Int32Value)
|
||||||
|
|
||||||
if !ok {
|
if !ok {
|
||||||
diags.AddError(
|
diags.AddError(
|
||||||
"Attribute Wrong Type",
|
"Attribute Wrong Type",
|
||||||
fmt.Sprintf(`memory expected to be basetypes.Int64Value, was: %T`, memoryAttribute))
|
fmt.Sprintf(`memory expected to be basetypes.Int32Value, was: %T`, memoryAttribute))
|
||||||
}
|
}
|
||||||
|
|
||||||
minGbAttribute, ok := attributes["min_gb"]
|
minGbAttribute, ok := attributes["min_gb"]
|
||||||
|
|
@ -533,12 +533,12 @@ func NewFlavorsValue(attributeTypes map[string]attr.Type, attributes map[string]
|
||||||
return NewFlavorsValueUnknown(), diags
|
return NewFlavorsValueUnknown(), diags
|
||||||
}
|
}
|
||||||
|
|
||||||
minGbVal, ok := minGbAttribute.(basetypes.Int64Value)
|
minGbVal, ok := minGbAttribute.(basetypes.Int32Value)
|
||||||
|
|
||||||
if !ok {
|
if !ok {
|
||||||
diags.AddError(
|
diags.AddError(
|
||||||
"Attribute Wrong Type",
|
"Attribute Wrong Type",
|
||||||
fmt.Sprintf(`min_gb expected to be basetypes.Int64Value, was: %T`, minGbAttribute))
|
fmt.Sprintf(`min_gb expected to be basetypes.Int32Value, was: %T`, minGbAttribute))
|
||||||
}
|
}
|
||||||
|
|
||||||
nodeTypeAttribute, ok := attributes["node_type"]
|
nodeTypeAttribute, ok := attributes["node_type"]
|
||||||
|
|
@ -662,12 +662,12 @@ func (t FlavorsType) ValueType(ctx context.Context) attr.Value {
|
||||||
var _ basetypes.ObjectValuable = FlavorsValue{}
|
var _ basetypes.ObjectValuable = FlavorsValue{}
|
||||||
|
|
||||||
type FlavorsValue struct {
|
type FlavorsValue struct {
|
||||||
Cpu basetypes.Int64Value `tfsdk:"cpu"`
|
Cpu basetypes.Int32Value `tfsdk:"cpu"`
|
||||||
Description basetypes.StringValue `tfsdk:"description"`
|
Description basetypes.StringValue `tfsdk:"description"`
|
||||||
Id basetypes.StringValue `tfsdk:"id"`
|
Id basetypes.StringValue `tfsdk:"id"`
|
||||||
MaxGb basetypes.Int64Value `tfsdk:"max_gb"`
|
MaxGb basetypes.Int32Value `tfsdk:"max_gb"`
|
||||||
Memory basetypes.Int64Value `tfsdk:"memory"`
|
Memory basetypes.Int32Value `tfsdk:"memory"`
|
||||||
MinGb basetypes.Int64Value `tfsdk:"min_gb"`
|
MinGb basetypes.Int32Value `tfsdk:"min_gb"`
|
||||||
NodeType basetypes.StringValue `tfsdk:"node_type"`
|
NodeType basetypes.StringValue `tfsdk:"node_type"`
|
||||||
StorageClasses basetypes.ListValue `tfsdk:"storage_classes"`
|
StorageClasses basetypes.ListValue `tfsdk:"storage_classes"`
|
||||||
state attr.ValueState
|
state attr.ValueState
|
||||||
|
|
@ -679,12 +679,12 @@ func (v FlavorsValue) ToTerraformValue(ctx context.Context) (tftypes.Value, erro
|
||||||
var val tftypes.Value
|
var val tftypes.Value
|
||||||
var err error
|
var err error
|
||||||
|
|
||||||
attrTypes["cpu"] = basetypes.Int64Type{}.TerraformType(ctx)
|
attrTypes["cpu"] = basetypes.Int32Type{}.TerraformType(ctx)
|
||||||
attrTypes["description"] = basetypes.StringType{}.TerraformType(ctx)
|
attrTypes["description"] = basetypes.StringType{}.TerraformType(ctx)
|
||||||
attrTypes["id"] = basetypes.StringType{}.TerraformType(ctx)
|
attrTypes["id"] = basetypes.StringType{}.TerraformType(ctx)
|
||||||
attrTypes["max_gb"] = basetypes.Int64Type{}.TerraformType(ctx)
|
attrTypes["max_gb"] = basetypes.Int32Type{}.TerraformType(ctx)
|
||||||
attrTypes["memory"] = basetypes.Int64Type{}.TerraformType(ctx)
|
attrTypes["memory"] = basetypes.Int32Type{}.TerraformType(ctx)
|
||||||
attrTypes["min_gb"] = basetypes.Int64Type{}.TerraformType(ctx)
|
attrTypes["min_gb"] = basetypes.Int32Type{}.TerraformType(ctx)
|
||||||
attrTypes["node_type"] = basetypes.StringType{}.TerraformType(ctx)
|
attrTypes["node_type"] = basetypes.StringType{}.TerraformType(ctx)
|
||||||
attrTypes["storage_classes"] = basetypes.ListType{
|
attrTypes["storage_classes"] = basetypes.ListType{
|
||||||
ElemType: StorageClassesValue{}.Type(ctx),
|
ElemType: StorageClassesValue{}.Type(ctx),
|
||||||
|
|
@ -819,12 +819,12 @@ func (v FlavorsValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue,
|
||||||
}
|
}
|
||||||
|
|
||||||
attributeTypes := map[string]attr.Type{
|
attributeTypes := map[string]attr.Type{
|
||||||
"cpu": basetypes.Int64Type{},
|
"cpu": basetypes.Int32Type{},
|
||||||
"description": basetypes.StringType{},
|
"description": basetypes.StringType{},
|
||||||
"id": basetypes.StringType{},
|
"id": basetypes.StringType{},
|
||||||
"max_gb": basetypes.Int64Type{},
|
"max_gb": basetypes.Int32Type{},
|
||||||
"memory": basetypes.Int64Type{},
|
"memory": basetypes.Int32Type{},
|
||||||
"min_gb": basetypes.Int64Type{},
|
"min_gb": basetypes.Int32Type{},
|
||||||
"node_type": basetypes.StringType{},
|
"node_type": basetypes.StringType{},
|
||||||
"storage_classes": basetypes.ListType{
|
"storage_classes": basetypes.ListType{
|
||||||
ElemType: StorageClassesValue{}.Type(ctx),
|
ElemType: StorageClassesValue{}.Type(ctx),
|
||||||
|
|
@ -915,12 +915,12 @@ func (v FlavorsValue) Type(ctx context.Context) attr.Type {
|
||||||
|
|
||||||
func (v FlavorsValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
|
func (v FlavorsValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
|
||||||
return map[string]attr.Type{
|
return map[string]attr.Type{
|
||||||
"cpu": basetypes.Int64Type{},
|
"cpu": basetypes.Int32Type{},
|
||||||
"description": basetypes.StringType{},
|
"description": basetypes.StringType{},
|
||||||
"id": basetypes.StringType{},
|
"id": basetypes.StringType{},
|
||||||
"max_gb": basetypes.Int64Type{},
|
"max_gb": basetypes.Int32Type{},
|
||||||
"memory": basetypes.Int64Type{},
|
"memory": basetypes.Int32Type{},
|
||||||
"min_gb": basetypes.Int64Type{},
|
"min_gb": basetypes.Int32Type{},
|
||||||
"node_type": basetypes.StringType{},
|
"node_type": basetypes.StringType{},
|
||||||
"storage_classes": basetypes.ListType{
|
"storage_classes": basetypes.ListType{
|
||||||
ElemType: StorageClassesValue{}.Type(ctx),
|
ElemType: StorageClassesValue{}.Type(ctx),
|
||||||
|
|
@ -981,12 +981,12 @@ func (t StorageClassesType) ValueFromObject(ctx context.Context, in basetypes.Ob
|
||||||
return nil, diags
|
return nil, diags
|
||||||
}
|
}
|
||||||
|
|
||||||
maxIoPerSecVal, ok := maxIoPerSecAttribute.(basetypes.Int64Value)
|
maxIoPerSecVal, ok := maxIoPerSecAttribute.(basetypes.Int32Value)
|
||||||