ref 635294: server update schedules (#607)
Signed-off-by: Adrian Nackov <adrian.nackov@mail.schwarz>
This commit is contained in:
parent
1e1507bd96
commit
100704c0f4
18 changed files with 1564 additions and 1 deletions
41
docs/data-sources/server_update_schedule.md
Normal file
41
docs/data-sources/server_update_schedule.md
Normal file
|
|
@ -0,0 +1,41 @@
|
||||||
|
---
|
||||||
|
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||||
|
page_title: "stackit_server_update_schedule Data Source - stackit"
|
||||||
|
subcategory: ""
|
||||||
|
description: |-
|
||||||
|
Server update schedule datasource schema. Must have a region specified in the provider configuration.
|
||||||
|
~> This resource is in beta and may be subject to breaking changes in the future. Use with caution. See our guide https://registry.terraform.io/providers/stackitcloud/stackit/latest/docs/guides/opting_into_beta_resources for how to opt-in to use beta resources.
|
||||||
|
---
|
||||||
|
|
||||||
|
# stackit_server_update_schedule (Data Source)
|
||||||
|
|
||||||
|
Server update schedule datasource schema. Must have a `region` specified in the provider configuration.
|
||||||
|
|
||||||
|
~> This resource is in beta and may be subject to breaking changes in the future. Use with caution. See our [guide](https://registry.terraform.io/providers/stackitcloud/stackit/latest/docs/guides/opting_into_beta_resources) for how to opt-in to use beta resources.
|
||||||
|
|
||||||
|
## Example Usage
|
||||||
|
|
||||||
|
```terraform
|
||||||
|
data "stackit_server_update_schedule" "example" {
|
||||||
|
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
server_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
update_schedule_id = xxxxx
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
<!-- schema generated by tfplugindocs -->
|
||||||
|
## Schema
|
||||||
|
|
||||||
|
### Required
|
||||||
|
|
||||||
|
- `project_id` (String) STACKIT Project ID to which the server is associated.
|
||||||
|
- `server_id` (String) Server ID for the update schedule.
|
||||||
|
- `update_schedule_id` (Number) Update schedule ID.
|
||||||
|
|
||||||
|
### Read-Only
|
||||||
|
|
||||||
|
- `enabled` (Boolean) Is the update schedule enabled or disabled.
|
||||||
|
- `id` (String) Terraform's internal resource identifier. It is structured as "`project_id`,`server_id`,`update_schedule_id`".
|
||||||
|
- `maintenance_window` (Number) Maintenance window [1..24].
|
||||||
|
- `name` (String) The schedule name.
|
||||||
|
- `rrule` (String) Update schedule described in `rrule` (recurrence rule) format.
|
||||||
47
docs/data-sources/server_update_schedules.md
Normal file
47
docs/data-sources/server_update_schedules.md
Normal file
|
|
@ -0,0 +1,47 @@
|
||||||
|
---
|
||||||
|
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||||
|
page_title: "stackit_server_update_schedules Data Source - stackit"
|
||||||
|
subcategory: ""
|
||||||
|
description: |-
|
||||||
|
Server update schedules datasource schema. Must have a region specified in the provider configuration.
|
||||||
|
~> This resource is in beta and may be subject to breaking changes in the future. Use with caution. See our guide https://registry.terraform.io/providers/stackitcloud/stackit/latest/docs/guides/opting_into_beta_resources for how to opt-in to use beta resources.
|
||||||
|
---
|
||||||
|
|
||||||
|
# stackit_server_update_schedules (Data Source)
|
||||||
|
|
||||||
|
Server update schedules datasource schema. Must have a `region` specified in the provider configuration.
|
||||||
|
|
||||||
|
~> This resource is in beta and may be subject to breaking changes in the future. Use with caution. See our [guide](https://registry.terraform.io/providers/stackitcloud/stackit/latest/docs/guides/opting_into_beta_resources) for how to opt-in to use beta resources.
|
||||||
|
|
||||||
|
## Example Usage
|
||||||
|
|
||||||
|
```terraform
|
||||||
|
data "stackit_server_update_schedules" "example" {
|
||||||
|
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
server_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
<!-- schema generated by tfplugindocs -->
|
||||||
|
## Schema
|
||||||
|
|
||||||
|
### Required
|
||||||
|
|
||||||
|
- `project_id` (String) STACKIT Project ID (UUID) to which the server is associated.
|
||||||
|
- `server_id` (String) Server ID (UUID) to which the update schedule is associated.
|
||||||
|
|
||||||
|
### Read-Only
|
||||||
|
|
||||||
|
- `id` (String) Terraform's internal data source identifier. It is structured as "`project_id`,`server_id`".
|
||||||
|
- `items` (Attributes List) (see [below for nested schema](#nestedatt--items))
|
||||||
|
|
||||||
|
<a id="nestedatt--items"></a>
|
||||||
|
### Nested Schema for `items`
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `enabled` (Boolean) Is the update schedule enabled or disabled.
|
||||||
|
- `maintenance_window` (Number) Maintenance window [1..24].
|
||||||
|
- `name` (String) The update schedule name.
|
||||||
|
- `rrule` (String) Update schedule described in `rrule` (recurrence rule) format.
|
||||||
|
- `update_schedule_id` (Number)
|
||||||
|
|
@ -173,6 +173,7 @@ Note: AWS specific checks must be skipped as they do not work on STACKIT. For de
|
||||||
- `resourcemanager_custom_endpoint` (String) Custom endpoint for the Resource Manager service
|
- `resourcemanager_custom_endpoint` (String) Custom endpoint for the Resource Manager service
|
||||||
- `secretsmanager_custom_endpoint` (String) Custom endpoint for the Secrets Manager service
|
- `secretsmanager_custom_endpoint` (String) Custom endpoint for the Secrets Manager service
|
||||||
- `server_backup_custom_endpoint` (String) Custom endpoint for the Server Backup service
|
- `server_backup_custom_endpoint` (String) Custom endpoint for the Server Backup service
|
||||||
|
- `server_update_custom_endpoint` (String) Custom endpoint for the Server Update service
|
||||||
- `service_account_email` (String) Service account email. It can also be set using the environment variable STACKIT_SERVICE_ACCOUNT_EMAIL. It is required if you want to use the resource manager project resource.
|
- `service_account_email` (String) Service account email. It can also be set using the environment variable STACKIT_SERVICE_ACCOUNT_EMAIL. It is required if you want to use the resource manager project resource.
|
||||||
- `service_account_key` (String) Service account key used for authentication. If set, the key flow will be used to authenticate all operations.
|
- `service_account_key` (String) Service account key used for authentication. If set, the key flow will be used to authenticate all operations.
|
||||||
- `service_account_key_path` (String) Path for the service account key used for authentication. If set, the key flow will be used to authenticate all operations.
|
- `service_account_key_path` (String) Path for the service account key used for authentication. If set, the key flow will be used to authenticate all operations.
|
||||||
|
|
|
||||||
44
docs/resources/server_update_schedule.md
Normal file
44
docs/resources/server_update_schedule.md
Normal file
|
|
@ -0,0 +1,44 @@
|
||||||
|
---
|
||||||
|
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||||
|
page_title: "stackit_server_update_schedule Resource - stackit"
|
||||||
|
subcategory: ""
|
||||||
|
description: |-
|
||||||
|
Server update schedule resource schema. Must have a region specified in the provider configuration.
|
||||||
|
~> This resource is in beta and may be subject to breaking changes in the future. Use with caution. See our guide https://registry.terraform.io/providers/stackitcloud/stackit/latest/docs/guides/opting_into_beta_resources for how to opt-in to use beta resources.
|
||||||
|
---
|
||||||
|
|
||||||
|
# stackit_server_update_schedule (Resource)
|
||||||
|
|
||||||
|
Server update schedule resource schema. Must have a `region` specified in the provider configuration.
|
||||||
|
|
||||||
|
~> This resource is in beta and may be subject to breaking changes in the future. Use with caution. See our [guide](https://registry.terraform.io/providers/stackitcloud/stackit/latest/docs/guides/opting_into_beta_resources) for how to opt-in to use beta resources.
|
||||||
|
|
||||||
|
## Example Usage
|
||||||
|
|
||||||
|
```terraform
|
||||||
|
resource "stackit_server_update_schedule" "example" {
|
||||||
|
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
server_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
name = "example_update_schedule_name"
|
||||||
|
rrule = "DTSTART;TZID=Europe/Sofia:20200803T023000 RRULE:FREQ=DAILY;INTERVAL=1"
|
||||||
|
enabled = true
|
||||||
|
maintenance_window = 1
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
<!-- schema generated by tfplugindocs -->
|
||||||
|
## Schema
|
||||||
|
|
||||||
|
### Required
|
||||||
|
|
||||||
|
- `enabled` (Boolean) Is the update schedule enabled or disabled.
|
||||||
|
- `maintenance_window` (Number) Maintenance window [1..24].
|
||||||
|
- `name` (String) The schedule name.
|
||||||
|
- `project_id` (String) STACKIT Project ID to which the server is associated.
|
||||||
|
- `rrule` (String) Update schedule described in `rrule` (recurrence rule) format.
|
||||||
|
- `server_id` (String) Server ID for the update schedule.
|
||||||
|
|
||||||
|
### Read-Only
|
||||||
|
|
||||||
|
- `id` (String) Terraform's internal resource identifier. It is structured as "`project_id`,`server_id`,`update_schedule_id`".
|
||||||
|
- `update_schedule_id` (Number) Update schedule ID.
|
||||||
|
|
@ -0,0 +1,5 @@
|
||||||
|
data "stackit_server_update_schedule" "example" {
|
||||||
|
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
server_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
update_schedule_id = xxxxx
|
||||||
|
}
|
||||||
|
|
@ -0,0 +1,4 @@
|
||||||
|
data "stackit_server_update_schedules" "example" {
|
||||||
|
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
server_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
}
|
||||||
|
|
@ -0,0 +1,8 @@
|
||||||
|
resource "stackit_server_update_schedule" "example" {
|
||||||
|
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
server_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
name = "example_update_schedule_name"
|
||||||
|
rrule = "DTSTART;TZID=Europe/Sofia:20200803T023000 RRULE:FREQ=DAILY;INTERVAL=1"
|
||||||
|
enabled = true
|
||||||
|
maintenance_window = 1
|
||||||
|
}
|
||||||
5
go.mod
5
go.mod
|
|
@ -36,7 +36,10 @@ require (
|
||||||
golang.org/x/mod v0.22.0
|
golang.org/x/mod v0.22.0
|
||||||
)
|
)
|
||||||
|
|
||||||
require github.com/hashicorp/go-retryablehttp v0.7.7 // indirect
|
require (
|
||||||
|
github.com/hashicorp/go-retryablehttp v0.7.7 // indirect
|
||||||
|
github.com/stackitcloud/stackit-sdk-go/services/serverupdate v0.2.2 // indirect
|
||||||
|
)
|
||||||
|
|
||||||
require (
|
require (
|
||||||
github.com/ProtonMail/go-crypto v1.1.0-alpha.2 // indirect
|
github.com/ProtonMail/go-crypto v1.1.0-alpha.2 // indirect
|
||||||
|
|
|
||||||
6
go.sum
6
go.sum
|
|
@ -187,6 +187,12 @@ github.com/stackitcloud/stackit-sdk-go/services/serverbackup v0.3.0 h1:Tlps8vBQm
|
||||||
github.com/stackitcloud/stackit-sdk-go/services/serverbackup v0.3.0/go.mod h1:+807U5ZLXns+CEbyIg483wNEwV10vaN6GjMnSZhw/64=
|
github.com/stackitcloud/stackit-sdk-go/services/serverbackup v0.3.0/go.mod h1:+807U5ZLXns+CEbyIg483wNEwV10vaN6GjMnSZhw/64=
|
||||||
github.com/stackitcloud/stackit-sdk-go/services/serviceenablement v0.4.0 h1:K5fVTcJxjOVwJBa3kiWRsYNAq+I3jAYdU1U+f6no5lE=
|
github.com/stackitcloud/stackit-sdk-go/services/serviceenablement v0.4.0 h1:K5fVTcJxjOVwJBa3kiWRsYNAq+I3jAYdU1U+f6no5lE=
|
||||||
github.com/stackitcloud/stackit-sdk-go/services/serviceenablement v0.4.0/go.mod h1:zyg0hpiNdZLRbelkJb2KDf9OHQKLqqcTpePQ1qHL5dE=
|
github.com/stackitcloud/stackit-sdk-go/services/serviceenablement v0.4.0/go.mod h1:zyg0hpiNdZLRbelkJb2KDf9OHQKLqqcTpePQ1qHL5dE=
|
||||||
|
github.com/stackitcloud/stackit-sdk-go/services/serverupdate v0.1.0 h1:jTGCE71TX/Hafp6FqM/wIiHcTWreAktnmqEi4g97SGk=
|
||||||
|
github.com/stackitcloud/stackit-sdk-go/services/serverupdate v0.1.0/go.mod h1:etidTptNDvvCPA1FGC7T9DXHxXA4bYW3qIUzWG8wVcc=
|
||||||
|
github.com/stackitcloud/stackit-sdk-go/services/serverupdate v0.2.1 h1:fzYC2JifJUJ37mAKjGOEbNFjgK+DQRwjkoJ7NDXaQvQ=
|
||||||
|
github.com/stackitcloud/stackit-sdk-go/services/serverupdate v0.2.1/go.mod h1:etidTptNDvvCPA1FGC7T9DXHxXA4bYW3qIUzWG8wVcc=
|
||||||
|
github.com/stackitcloud/stackit-sdk-go/services/serverupdate v0.2.2 h1:HXZ/hjO3shFPLrrmSb1G9eXDCSYrMeYveklm6rEUd0Y=
|
||||||
|
github.com/stackitcloud/stackit-sdk-go/services/serverupdate v0.2.2/go.mod h1:etidTptNDvvCPA1FGC7T9DXHxXA4bYW3qIUzWG8wVcc=
|
||||||
github.com/stackitcloud/stackit-sdk-go/services/ske v0.20.0 h1:ssEywzCS8IdRtzyxweLUKBG5GFbgwjNWJh++wGqigJM=
|
github.com/stackitcloud/stackit-sdk-go/services/ske v0.20.0 h1:ssEywzCS8IdRtzyxweLUKBG5GFbgwjNWJh++wGqigJM=
|
||||||
github.com/stackitcloud/stackit-sdk-go/services/ske v0.20.0/go.mod h1:A4+9KslxCA31JvxnT+O/GC67eAOdw+iqhBzewZZaCD0=
|
github.com/stackitcloud/stackit-sdk-go/services/ske v0.20.0/go.mod h1:A4+9KslxCA31JvxnT+O/GC67eAOdw+iqhBzewZZaCD0=
|
||||||
github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex v0.8.0 h1:1ByAgO10fxWF+UZ+RkJeAiv+h5AgqrzYz6r86Pn/BWE=
|
github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex v0.8.0 h1:1ByAgO10fxWF+UZ+RkJeAiv+h5AgqrzYz6r86Pn/BWE=
|
||||||
|
|
|
||||||
|
|
@ -35,6 +35,7 @@ type ProviderData struct {
|
||||||
SecretsManagerCustomEndpoint string
|
SecretsManagerCustomEndpoint string
|
||||||
SQLServerFlexCustomEndpoint string
|
SQLServerFlexCustomEndpoint string
|
||||||
ServerBackupCustomEndpoint string
|
ServerBackupCustomEndpoint string
|
||||||
|
ServerUpdateCustomEndpoint string
|
||||||
SKECustomEndpoint string
|
SKECustomEndpoint string
|
||||||
ServiceEnablementCustomEndpoint string
|
ServiceEnablementCustomEndpoint string
|
||||||
EnableBetaResources bool
|
EnableBetaResources bool
|
||||||
|
|
|
||||||
452
stackit/internal/services/serverupdate/schedule/resource.go
Normal file
452
stackit/internal/services/serverupdate/schedule/resource.go
Normal file
|
|
@ -0,0 +1,452 @@
|
||||||
|
package schedule
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"fmt"
|
||||||
|
"net/http"
|
||||||
|
"strconv"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/hashicorp/terraform-plugin-framework-validators/int64validator"
|
||||||
|
"github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
|
||||||
|
"github.com/hashicorp/terraform-plugin-framework/path"
|
||||||
|
"github.com/hashicorp/terraform-plugin-framework/resource"
|
||||||
|
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
|
||||||
|
"github.com/hashicorp/terraform-plugin-framework/resource/schema/int64planmodifier"
|
||||||
|
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
|
||||||
|
"github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
|
||||||
|
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
|
||||||
|
"github.com/hashicorp/terraform-plugin-framework/types"
|
||||||
|
"github.com/hashicorp/terraform-plugin-log/tflog"
|
||||||
|
|
||||||
|
"github.com/stackitcloud/terraform-provider-stackit/stackit/internal/conversion"
|
||||||
|
"github.com/stackitcloud/terraform-provider-stackit/stackit/internal/core"
|
||||||
|
"github.com/stackitcloud/terraform-provider-stackit/stackit/internal/features"
|
||||||
|
"github.com/stackitcloud/terraform-provider-stackit/stackit/internal/validate"
|
||||||
|
|
||||||
|
"github.com/stackitcloud/stackit-sdk-go/core/config"
|
||||||
|
"github.com/stackitcloud/stackit-sdk-go/core/oapierror"
|
||||||
|
"github.com/stackitcloud/stackit-sdk-go/services/serverupdate"
|
||||||
|
)
|
||||||
|
|
||||||
|
// resourceBetaCheckDone is used to prevent multiple checks for beta resources.
|
||||||
|
// This is a workaround for the lack of a global state in the provider and
|
||||||
|
// needs to exist because the Configure method is called twice.
|
||||||
|
var resourceBetaCheckDone bool
|
||||||
|
|
||||||
|
// Ensure the implementation satisfies the expected interfaces.
|
||||||
|
var (
|
||||||
|
_ resource.Resource = &scheduleResource{}
|
||||||
|
_ resource.ResourceWithConfigure = &scheduleResource{}
|
||||||
|
_ resource.ResourceWithImportState = &scheduleResource{}
|
||||||
|
)
|
||||||
|
|
||||||
|
type Model struct {
|
||||||
|
ID types.String `tfsdk:"id"`
|
||||||
|
ProjectId types.String `tfsdk:"project_id"`
|
||||||
|
ServerId types.String `tfsdk:"server_id"`
|
||||||
|
UpdateScheduleId types.Int64 `tfsdk:"update_schedule_id"`
|
||||||
|
Name types.String `tfsdk:"name"`
|
||||||
|
Rrule types.String `tfsdk:"rrule"`
|
||||||
|
Enabled types.Bool `tfsdk:"enabled"`
|
||||||
|
MaintenanceWindow types.Int64 `tfsdk:"maintenance_window"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewScheduleResource is a helper function to simplify the provider implementation.
|
||||||
|
func NewScheduleResource() resource.Resource {
|
||||||
|
return &scheduleResource{}
|
||||||
|
}
|
||||||
|
|
||||||
|
// scheduleResource is the resource implementation.
|
||||||
|
type scheduleResource struct {
|
||||||
|
client *serverupdate.APIClient
|
||||||
|
}
|
||||||
|
|
||||||
|
// Metadata returns the resource type name.
|
||||||
|
func (r *scheduleResource) Metadata(_ context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
|
||||||
|
resp.TypeName = req.ProviderTypeName + "_server_update_schedule"
|
||||||
|
}
|
||||||
|
|
||||||
|
// Configure adds the provider configured client to the resource.
|
||||||
|
func (r *scheduleResource) Configure(ctx context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) {
|
||||||
|
// Prevent panic if the provider has not been configured.
|
||||||
|
if req.ProviderData == nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
providerData, ok := req.ProviderData.(core.ProviderData)
|
||||||
|
if !ok {
|
||||||
|
core.LogAndAddError(ctx, &resp.Diagnostics, "Error configuring API client", fmt.Sprintf("Expected configure type stackit.ProviderData, got %T", req.ProviderData))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if !resourceBetaCheckDone {
|
||||||
|
features.CheckBetaResourcesEnabled(ctx, &providerData, &resp.Diagnostics, "stackit_server_update_schedule", "resource")
|
||||||
|
if resp.Diagnostics.HasError() {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
resourceBetaCheckDone = true
|
||||||
|
}
|
||||||
|
|
||||||
|
var apiClient *serverupdate.APIClient
|
||||||
|
var err error
|
||||||
|
if providerData.ServerUpdateCustomEndpoint != "" {
|
||||||
|
ctx = tflog.SetField(ctx, "server_update_custom_endpoint", providerData.ServerUpdateCustomEndpoint)
|
||||||
|
apiClient, err = serverupdate.NewAPIClient(
|
||||||
|
config.WithCustomAuth(providerData.RoundTripper),
|
||||||
|
config.WithEndpoint(providerData.ServerUpdateCustomEndpoint),
|
||||||
|
)
|
||||||
|
} else {
|
||||||
|
apiClient, err = serverupdate.NewAPIClient(
|
||||||
|
config.WithCustomAuth(providerData.RoundTripper),
|
||||||
|
config.WithRegion(providerData.Region),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
core.LogAndAddError(ctx, &resp.Diagnostics, "Error configuring API client", fmt.Sprintf("Configuring client: %v. This is an error related to the provider configuration, not to the resource configuration", err))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
r.client = apiClient
|
||||||
|
tflog.Info(ctx, "Server update client configured.")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Schema defines the schema for the resource.
|
||||||
|
func (r *scheduleResource) Schema(_ context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) {
|
||||||
|
resp.Schema = schema.Schema{
|
||||||
|
Description: "Server update schedule resource schema. Must have a `region` specified in the provider configuration.",
|
||||||
|
MarkdownDescription: features.AddBetaDescription("Server update schedule resource schema. Must have a `region` specified in the provider configuration."),
|
||||||
|
Attributes: map[string]schema.Attribute{
|
||||||
|
"id": schema.StringAttribute{
|
||||||
|
Description: "Terraform's internal resource identifier. It is structured as \"`project_id`,`server_id`,`update_schedule_id`\".",
|
||||||
|
Computed: true,
|
||||||
|
PlanModifiers: []planmodifier.String{
|
||||||
|
stringplanmodifier.UseStateForUnknown(),
|
||||||
|
},
|
||||||
|
},
|
||||||
|
"name": schema.StringAttribute{
|
||||||
|
Description: "The schedule name.",
|
||||||
|
Required: true,
|
||||||
|
PlanModifiers: []planmodifier.String{
|
||||||
|
stringplanmodifier.RequiresReplace(),
|
||||||
|
stringplanmodifier.UseStateForUnknown(),
|
||||||
|
},
|
||||||
|
Validators: []validator.String{
|
||||||
|
stringvalidator.LengthBetween(1, 255),
|
||||||
|
},
|
||||||
|
},
|
||||||
|
"update_schedule_id": schema.Int64Attribute{
|
||||||
|
Description: "Update schedule ID.",
|
||||||
|
Computed: true,
|
||||||
|
PlanModifiers: []planmodifier.Int64{
|
||||||
|
int64planmodifier.UseStateForUnknown(),
|
||||||
|
},
|
||||||
|
Validators: []validator.Int64{
|
||||||
|
int64validator.AtLeast(1),
|
||||||
|
},
|
||||||
|
},
|
||||||
|
"project_id": schema.StringAttribute{
|
||||||
|
Description: "STACKIT Project ID to which the server is associated.",
|
||||||
|
Required: true,
|
||||||
|
PlanModifiers: []planmodifier.String{
|
||||||
|
stringplanmodifier.RequiresReplace(),
|
||||||
|
stringplanmodifier.UseStateForUnknown(),
|
||||||
|
},
|
||||||
|
Validators: []validator.String{
|
||||||
|
validate.UUID(),
|
||||||
|
validate.NoSeparator(),
|
||||||
|
},
|
||||||
|
},
|
||||||
|
"server_id": schema.StringAttribute{
|
||||||
|
Description: "Server ID for the update schedule.",
|
||||||
|
Required: true,
|
||||||
|
PlanModifiers: []planmodifier.String{
|
||||||
|
stringplanmodifier.RequiresReplace(),
|
||||||
|
stringplanmodifier.UseStateForUnknown(),
|
||||||
|
},
|
||||||
|
Validators: []validator.String{
|
||||||
|
validate.UUID(),
|
||||||
|
validate.NoSeparator(),
|
||||||
|
},
|
||||||
|
},
|
||||||
|
"rrule": schema.StringAttribute{
|
||||||
|
Description: "Update schedule described in `rrule` (recurrence rule) format.",
|
||||||
|
Required: true,
|
||||||
|
PlanModifiers: []planmodifier.String{
|
||||||
|
stringplanmodifier.RequiresReplace(),
|
||||||
|
stringplanmodifier.UseStateForUnknown(),
|
||||||
|
},
|
||||||
|
Validators: []validator.String{
|
||||||
|
validate.Rrule(),
|
||||||
|
validate.NoSeparator(),
|
||||||
|
},
|
||||||
|
},
|
||||||
|
"enabled": schema.BoolAttribute{
|
||||||
|
Description: "Is the update schedule enabled or disabled.",
|
||||||
|
Required: true,
|
||||||
|
},
|
||||||
|
"maintenance_window": schema.Int64Attribute{
|
||||||
|
Description: "Maintenance window [1..24].",
|
||||||
|
Required: true,
|
||||||
|
Validators: []validator.Int64{
|
||||||
|
int64validator.AtLeast(1),
|
||||||
|
int64validator.AtMost(24),
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create creates the resource and sets the initial Terraform state.
|
||||||
|
func (r *scheduleResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) { // nolint:gocritic // function signature required by Terraform
|
||||||
|
var model Model
|
||||||
|
diags := req.Plan.Get(ctx, &model)
|
||||||
|
resp.Diagnostics.Append(diags...)
|
||||||
|
if resp.Diagnostics.HasError() {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
projectId := model.ProjectId.ValueString()
|
||||||
|
serverId := model.ServerId.ValueString()
|
||||||
|
ctx = tflog.SetField(ctx, "project_id", projectId)
|
||||||
|
ctx = tflog.SetField(ctx, "server_id", serverId)
|
||||||
|
|
||||||
|
// Enable updates if not already enabled
|
||||||
|
err := enableUpdatesService(ctx, &model, r.client)
|
||||||
|
if err != nil {
|
||||||
|
core.LogAndAddError(ctx, &resp.Diagnostics, "Error creating server update schedule", fmt.Sprintf("Enabling server update project before creation: %v", err))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create new schedule
|
||||||
|
payload, err := toCreatePayload(&model)
|
||||||
|
if err != nil {
|
||||||
|
core.LogAndAddError(ctx, &resp.Diagnostics, "Error creating server update schedule", fmt.Sprintf("Creating API payload: %v", err))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
scheduleResp, err := r.client.CreateUpdateSchedule(ctx, projectId, serverId).CreateUpdateSchedulePayload(*payload).Execute()
|
||||||
|
if err != nil {
|
||||||
|
core.LogAndAddError(ctx, &resp.Diagnostics, "Error creating server update schedule", fmt.Sprintf("Calling API: %v", err))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
ctx = tflog.SetField(ctx, "update_schedule_id", *scheduleResp.Id)
|
||||||
|
|
||||||
|
// Map response body to schema
|
||||||
|
err = mapFields(scheduleResp, &model)
|
||||||
|
if err != nil {
|
||||||
|
core.LogAndAddError(ctx, &resp.Diagnostics, "Error creating server update schedule", fmt.Sprintf("Processing API payload: %v", err))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
diags = resp.State.Set(ctx, model)
|
||||||
|
resp.Diagnostics.Append(diags...)
|
||||||
|
if resp.Diagnostics.HasError() {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
tflog.Info(ctx, "Server update schedule created.")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Read refreshes the Terraform state with the latest data.
|
||||||
|
func (r *scheduleResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) { // nolint:gocritic // function signature required by Terraform
|
||||||
|
var model Model
|
||||||
|
diags := req.State.Get(ctx, &model)
|
||||||
|
resp.Diagnostics.Append(diags...)
|
||||||
|
if resp.Diagnostics.HasError() {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
projectId := model.ProjectId.ValueString()
|
||||||
|
serverId := model.ServerId.ValueString()
|
||||||
|
updateScheduleId := model.UpdateScheduleId.ValueInt64()
|
||||||
|
ctx = tflog.SetField(ctx, "project_id", projectId)
|
||||||
|
ctx = tflog.SetField(ctx, "server_id", serverId)
|
||||||
|
ctx = tflog.SetField(ctx, "update_schedule_id", updateScheduleId)
|
||||||
|
|
||||||
|
scheduleResp, err := r.client.GetUpdateSchedule(ctx, projectId, serverId, strconv.FormatInt(updateScheduleId, 10)).Execute()
|
||||||
|
if err != nil {
|
||||||
|
oapiErr, ok := err.(*oapierror.GenericOpenAPIError) //nolint:errorlint //complaining that error.As should be used to catch wrapped errors, but this error should not be wrapped
|
||||||
|
if ok && oapiErr.StatusCode == http.StatusNotFound {
|
||||||
|
resp.State.RemoveResource(ctx)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
core.LogAndAddError(ctx, &resp.Diagnostics, "Error reading update schedule", fmt.Sprintf("Calling API: %v", err))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Map response body to schema
|
||||||
|
err = mapFields(scheduleResp, &model)
|
||||||
|
if err != nil {
|
||||||
|
core.LogAndAddError(ctx, &resp.Diagnostics, "Error reading update schedule", fmt.Sprintf("Processing API payload: %v", err))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Set refreshed state
|
||||||
|
diags = resp.State.Set(ctx, model)
|
||||||
|
resp.Diagnostics.Append(diags...)
|
||||||
|
if resp.Diagnostics.HasError() {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
tflog.Info(ctx, "Server update schedule read.")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Update updates the resource and sets the updated Terraform state on success.
|
||||||
|
func (r *scheduleResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) { // nolint:gocritic // function signature required by Terraform
|
||||||
|
var model Model
|
||||||
|
diags := req.Plan.Get(ctx, &model)
|
||||||
|
resp.Diagnostics.Append(diags...)
|
||||||
|
if resp.Diagnostics.HasError() {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
projectId := model.ProjectId.ValueString()
|
||||||
|
serverId := model.ServerId.ValueString()
|
||||||
|
updateScheduleId := model.UpdateScheduleId.ValueInt64()
|
||||||
|
ctx = tflog.SetField(ctx, "project_id", projectId)
|
||||||
|
ctx = tflog.SetField(ctx, "server_id", serverId)
|
||||||
|
ctx = tflog.SetField(ctx, "update_schedule_id", updateScheduleId)
|
||||||
|
|
||||||
|
// Update schedule
|
||||||
|
payload, err := toUpdatePayload(&model)
|
||||||
|
if err != nil {
|
||||||
|
core.LogAndAddError(ctx, &resp.Diagnostics, "Error updating server update schedule", fmt.Sprintf("Creating API payload: %v", err))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
scheduleResp, err := r.client.UpdateUpdateSchedule(ctx, projectId, serverId, strconv.FormatInt(updateScheduleId, 10)).UpdateUpdateSchedulePayload(*payload).Execute()
|
||||||
|
if err != nil {
|
||||||
|
core.LogAndAddError(ctx, &resp.Diagnostics, "Error updating server update schedule", fmt.Sprintf("Calling API: %v", err))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Map response body to schema
|
||||||
|
err = mapFields(scheduleResp, &model)
|
||||||
|
if err != nil {
|
||||||
|
core.LogAndAddError(ctx, &resp.Diagnostics, "Error updating server update schedule", fmt.Sprintf("Processing API payload: %v", err))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
diags = resp.State.Set(ctx, model)
|
||||||
|
resp.Diagnostics.Append(diags...)
|
||||||
|
if resp.Diagnostics.HasError() {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
tflog.Info(ctx, "Server update schedule updated.")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Delete deletes the resource and removes the Terraform state on success.
|
||||||
|
func (r *scheduleResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) { // nolint:gocritic // function signature required by Terraform
|
||||||
|
var model Model
|
||||||
|
diags := req.State.Get(ctx, &model)
|
||||||
|
resp.Diagnostics.Append(diags...)
|
||||||
|
if resp.Diagnostics.HasError() {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
projectId := model.ProjectId.ValueString()
|
||||||
|
serverId := model.ServerId.ValueString()
|
||||||
|
updateScheduleId := model.UpdateScheduleId.ValueInt64()
|
||||||
|
ctx = tflog.SetField(ctx, "project_id", projectId)
|
||||||
|
ctx = tflog.SetField(ctx, "server_id", serverId)
|
||||||
|
ctx = tflog.SetField(ctx, "update_schedule_id", updateScheduleId)
|
||||||
|
|
||||||
|
err := r.client.DeleteUpdateSchedule(ctx, projectId, serverId, strconv.FormatInt(updateScheduleId, 10)).Execute()
|
||||||
|
if err != nil {
|
||||||
|
core.LogAndAddError(ctx, &resp.Diagnostics, "Error deleting server update schedule", fmt.Sprintf("Calling API: %v", err))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
tflog.Info(ctx, "Server update schedule deleted.")
|
||||||
|
}
|
||||||
|
|
||||||
|
// ImportState imports a resource into the Terraform state on success.
|
||||||
|
// The expected format of the resource import identifier is: // project_id,server_id,schedule_id
|
||||||
|
func (r *scheduleResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
|
||||||
|
idParts := strings.Split(req.ID, core.Separator)
|
||||||
|
if len(idParts) != 3 || idParts[0] == "" || idParts[1] == "" || idParts[2] == "" {
|
||||||
|
core.LogAndAddError(ctx, &resp.Diagnostics,
|
||||||
|
"Error importing server update schedule",
|
||||||
|
fmt.Sprintf("Expected import identifier with format [project_id],[server_id],[update_schedule_id], got %q", req.ID),
|
||||||
|
)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
intId, err := strconv.ParseInt(idParts[2], 10, 64)
|
||||||
|
if err != nil {
|
||||||
|
core.LogAndAddError(ctx, &resp.Diagnostics,
|
||||||
|
"Error importing server update schedule",
|
||||||
|
fmt.Sprintf("Expected update_schedule_id to be int64, got %q", idParts[2]),
|
||||||
|
)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), idParts[0])...)
|
||||||
|
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("server_id"), idParts[1])...)
|
||||||
|
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("update_schedule_id"), intId)...)
|
||||||
|
tflog.Info(ctx, "Server update schedule state imported.")
|
||||||
|
}
|
||||||
|
|
||||||
|
func mapFields(schedule *serverupdate.UpdateSchedule, model *Model) error {
|
||||||
|
if schedule == nil {
|
||||||
|
return fmt.Errorf("response input is nil")
|
||||||
|
}
|
||||||
|
if model == nil {
|
||||||
|
return fmt.Errorf("model input is nil")
|
||||||
|
}
|
||||||
|
if schedule.Id == nil {
|
||||||
|
return fmt.Errorf("response id is nil")
|
||||||
|
}
|
||||||
|
|
||||||
|
model.UpdateScheduleId = types.Int64PointerValue(schedule.Id)
|
||||||
|
idParts := []string{
|
||||||
|
model.ProjectId.ValueString(),
|
||||||
|
model.ServerId.ValueString(),
|
||||||
|
strconv.FormatInt(model.UpdateScheduleId.ValueInt64(), 10),
|
||||||
|
}
|
||||||
|
model.ID = types.StringValue(
|
||||||
|
strings.Join(idParts, core.Separator),
|
||||||
|
)
|
||||||
|
model.Name = types.StringPointerValue(schedule.Name)
|
||||||
|
model.Rrule = types.StringPointerValue(schedule.Rrule)
|
||||||
|
model.Enabled = types.BoolPointerValue(schedule.Enabled)
|
||||||
|
model.MaintenanceWindow = types.Int64PointerValue(schedule.MaintenanceWindow)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// If already enabled, just continues
|
||||||
|
func enableUpdatesService(ctx context.Context, model *Model, client *serverupdate.APIClient) error {
|
||||||
|
projectId := model.ProjectId.ValueString()
|
||||||
|
serverId := model.ServerId.ValueString()
|
||||||
|
enableServicePayload := serverupdate.EnableServicePayload{}
|
||||||
|
|
||||||
|
tflog.Debug(ctx, "Enabling server update service")
|
||||||
|
err := client.EnableService(ctx, projectId, serverId).EnableServicePayload(enableServicePayload).Execute()
|
||||||
|
if err != nil {
|
||||||
|
if strings.Contains(err.Error(), "Tried to activate already active service") {
|
||||||
|
tflog.Debug(ctx, "Service for server update already enabled")
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
return fmt.Errorf("enable server update service: %w", err)
|
||||||
|
}
|
||||||
|
tflog.Info(ctx, "Enabled server update service")
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func toCreatePayload(model *Model) (*serverupdate.CreateUpdateSchedulePayload, error) {
|
||||||
|
if model == nil {
|
||||||
|
return nil, fmt.Errorf("nil model")
|
||||||
|
}
|
||||||
|
|
||||||
|
return &serverupdate.CreateUpdateSchedulePayload{
|
||||||
|
Enabled: conversion.BoolValueToPointer(model.Enabled),
|
||||||
|
Name: conversion.StringValueToPointer(model.Name),
|
||||||
|
Rrule: conversion.StringValueToPointer(model.Rrule),
|
||||||
|
MaintenanceWindow: conversion.Int64ValueToPointer(model.MaintenanceWindow),
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func toUpdatePayload(model *Model) (*serverupdate.UpdateUpdateSchedulePayload, error) {
|
||||||
|
if model == nil {
|
||||||
|
return nil, fmt.Errorf("nil model")
|
||||||
|
}
|
||||||
|
|
||||||
|
return &serverupdate.UpdateUpdateSchedulePayload{
|
||||||
|
Enabled: conversion.BoolValueToPointer(model.Enabled),
|
||||||
|
Name: conversion.StringValueToPointer(model.Name),
|
||||||
|
Rrule: conversion.StringValueToPointer(model.Rrule),
|
||||||
|
MaintenanceWindow: conversion.Int64ValueToPointer(model.MaintenanceWindow),
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
221
stackit/internal/services/serverupdate/schedule/resource_test.go
Normal file
221
stackit/internal/services/serverupdate/schedule/resource_test.go
Normal file
|
|
@ -0,0 +1,221 @@
|
||||||
|
package schedule
|
||||||
|
|
||||||
|
import (
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/google/go-cmp/cmp"
|
||||||
|
"github.com/hashicorp/terraform-plugin-framework/types"
|
||||||
|
"github.com/stackitcloud/stackit-sdk-go/core/utils"
|
||||||
|
sdk "github.com/stackitcloud/stackit-sdk-go/services/serverupdate"
|
||||||
|
)
|
||||||
|
|
||||||
|
// TestMapFields verifies that mapFields maps an API UpdateSchedule onto the
// Terraform model, builds the composite ID from project/server/schedule ids,
// and rejects nil inputs and responses without an id.
func TestMapFields(t *testing.T) {
	tests := []struct {
		description string
		input       *sdk.UpdateSchedule
		expected    Model
		isValid     bool
	}{
		{
			"default_values",
			&sdk.UpdateSchedule{
				Id: utils.Ptr(int64(5)),
			},
			Model{
				ID:               types.StringValue("project_uid,server_uid,5"),
				ProjectId:        types.StringValue("project_uid"),
				ServerId:         types.StringValue("server_uid"),
				UpdateScheduleId: types.Int64Value(5),
			},
			true,
		},
		{
			"simple_values",
			&sdk.UpdateSchedule{
				Id:                utils.Ptr(int64(5)),
				Enabled:           utils.Ptr(true),
				Name:              utils.Ptr("update_schedule_name_1"),
				Rrule:             utils.Ptr("DTSTART;TZID=Europe/Sofia:20200803T023000 RRULE:FREQ=DAILY;INTERVAL=1"),
				MaintenanceWindow: utils.Ptr(int64(1)),
			},
			Model{
				ServerId:          types.StringValue("server_uid"),
				ProjectId:         types.StringValue("project_uid"),
				UpdateScheduleId:  types.Int64Value(5),
				ID:                types.StringValue("project_uid,server_uid,5"),
				Name:              types.StringValue("update_schedule_name_1"),
				Rrule:             types.StringValue("DTSTART;TZID=Europe/Sofia:20200803T023000 RRULE:FREQ=DAILY;INTERVAL=1"),
				Enabled:           types.BoolValue(true),
				MaintenanceWindow: types.Int64Value(1),
			},
			true,
		},
		{
			// nil response must produce an error, not a panic.
			"nil_response",
			nil,
			Model{},
			false,
		},
		{
			// A response without an Id is rejected by mapFields.
			"no_resource_id",
			&sdk.UpdateSchedule{},
			Model{},
			false,
		},
	}
	for _, tt := range tests {
		t.Run(tt.description, func(t *testing.T) {
			// ProjectId/ServerId are inputs to mapFields (used for the ID),
			// so they are pre-seeded from the expected model.
			state := &Model{
				ProjectId: tt.expected.ProjectId,
				ServerId:  tt.expected.ServerId,
			}
			err := mapFields(tt.input, state)
			if !tt.isValid && err == nil {
				t.Fatalf("Should have failed")
			}
			if tt.isValid && err != nil {
				t.Fatalf("Should not have failed: %v", err)
			}
			if tt.isValid {
				diff := cmp.Diff(state, &tt.expected)
				if diff != "" {
					t.Fatalf("Data does not match: %s", diff)
				}
			}
		})
	}
}
|
||||||
|
|
||||||
|
// TestToCreatePayload verifies the Terraform-model-to-create-payload
// conversion, including empty-string handling and a nil model error.
func TestToCreatePayload(t *testing.T) {
	tests := []struct {
		description string
		input       *Model
		expected    *sdk.CreateUpdateSchedulePayload
		isValid     bool
	}{
		{
			"default_values",
			&Model{},
			&sdk.CreateUpdateSchedulePayload{},
			true,
		},
		{
			"simple_values",
			&Model{
				Name:              types.StringValue("name"),
				Enabled:           types.BoolValue(true),
				Rrule:             types.StringValue("DTSTART;TZID=Europe/Sofia:20200803T023000 RRULE:FREQ=DAILY;INTERVAL=1"),
				MaintenanceWindow: types.Int64Value(1),
			},
			&sdk.CreateUpdateSchedulePayload{
				Name:              utils.Ptr("name"),
				Enabled:           utils.Ptr(true),
				Rrule:             utils.Ptr("DTSTART;TZID=Europe/Sofia:20200803T023000 RRULE:FREQ=DAILY;INTERVAL=1"),
				MaintenanceWindow: utils.Ptr(int64(1)),
			},
			true,
		},
		{
			// Explicit empty strings are sent as empty strings, not dropped.
			"null_fields_and_int_conversions",
			&Model{
				Name:  types.StringValue(""),
				Rrule: types.StringValue(""),
			},
			&sdk.CreateUpdateSchedulePayload{
				Name:  utils.Ptr(""),
				Rrule: utils.Ptr(""),
			},
			true,
		},
		{
			"nil_model",
			nil,
			nil,
			false,
		},
	}
	for _, tt := range tests {
		t.Run(tt.description, func(t *testing.T) {
			output, err := toCreatePayload(tt.input)
			if !tt.isValid && err == nil {
				t.Fatalf("Should have failed")
			}
			if tt.isValid && err != nil {
				t.Fatalf("Should not have failed: %v", err)
			}
			if tt.isValid {
				diff := cmp.Diff(output, tt.expected)
				if diff != "" {
					t.Fatalf("Data does not match: %s", diff)
				}
			}
		})
	}
}
|
||||||
|
|
||||||
|
// TestToUpdatePayload verifies the Terraform-model-to-update-payload
// conversion; the cases mirror TestToCreatePayload for the update variant.
func TestToUpdatePayload(t *testing.T) {
	tests := []struct {
		description string
		input       *Model
		expected    *sdk.UpdateUpdateSchedulePayload
		isValid     bool
	}{
		{
			"default_values",
			&Model{},
			&sdk.UpdateUpdateSchedulePayload{},
			true,
		},
		{
			"simple_values",
			&Model{
				Name:              types.StringValue("name"),
				Enabled:           types.BoolValue(true),
				Rrule:             types.StringValue("DTSTART;TZID=Europe/Sofia:20200803T023000 RRULE:FREQ=DAILY;INTERVAL=1"),
				MaintenanceWindow: types.Int64Value(1),
			},
			&sdk.UpdateUpdateSchedulePayload{
				Name:              utils.Ptr("name"),
				Enabled:           utils.Ptr(true),
				Rrule:             utils.Ptr("DTSTART;TZID=Europe/Sofia:20200803T023000 RRULE:FREQ=DAILY;INTERVAL=1"),
				MaintenanceWindow: utils.Ptr(int64(1)),
			},
			true,
		},
		{
			// Explicit empty strings are sent as empty strings, not dropped.
			"null_fields_and_int_conversions",
			&Model{
				Name:  types.StringValue(""),
				Rrule: types.StringValue(""),
			},
			&sdk.UpdateUpdateSchedulePayload{
				Name:  utils.Ptr(""),
				Rrule: utils.Ptr(""),
			},
			true,
		},
		{
			"nil_model",
			nil,
			nil,
			false,
		},
	}
	for _, tt := range tests {
		t.Run(tt.description, func(t *testing.T) {
			output, err := toUpdatePayload(tt.input)
			if !tt.isValid && err == nil {
				t.Fatalf("Should have failed")
			}
			if tt.isValid && err != nil {
				t.Fatalf("Should not have failed: %v", err)
			}
			if tt.isValid {
				diff := cmp.Diff(output, tt.expected)
				if diff != "" {
					t.Fatalf("Data does not match: %s", diff)
				}
			}
		})
	}
}
|
||||||
|
|
@ -0,0 +1,182 @@
|
||||||
|
package schedule
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"fmt"
|
||||||
|
"net/http"
|
||||||
|
"strconv"
|
||||||
|
|
||||||
|
"github.com/hashicorp/terraform-plugin-framework/datasource"
|
||||||
|
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
|
||||||
|
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
|
||||||
|
"github.com/hashicorp/terraform-plugin-log/tflog"
|
||||||
|
|
||||||
|
"github.com/stackitcloud/terraform-provider-stackit/stackit/internal/core"
|
||||||
|
"github.com/stackitcloud/terraform-provider-stackit/stackit/internal/features"
|
||||||
|
"github.com/stackitcloud/terraform-provider-stackit/stackit/internal/validate"
|
||||||
|
|
||||||
|
"github.com/stackitcloud/stackit-sdk-go/core/config"
|
||||||
|
"github.com/stackitcloud/stackit-sdk-go/core/oapierror"
|
||||||
|
"github.com/stackitcloud/stackit-sdk-go/services/serverupdate"
|
||||||
|
)
|
||||||
|
|
||||||
|
// scheduleDataSourceBetaCheckDone is used to prevent multiple checks for beta resources.
// This is a workaround for the lack of a global state in the provider and
// needs to exist because the Configure method is called twice.
var scheduleDataSourceBetaCheckDone bool

// Ensure the implementation satisfies the expected interfaces.
var (
	_ datasource.DataSource = &scheduleDataSource{}
)
|
||||||
|
|
||||||
|
// NewScheduleDataSource is a helper function to simplify the provider implementation.
// The returned value is registered with the provider's data source list.
func NewScheduleDataSource() datasource.DataSource {
	return &scheduleDataSource{}
}
|
||||||
|
|
||||||
|
// scheduleDataSource is the data source implementation.
type scheduleDataSource struct {
	// client is the server update API client, set in Configure.
	client *serverupdate.APIClient
}
|
||||||
|
|
||||||
|
// Metadata returns the data source type name.
// The full type name is "<provider>_server_update_schedule".
func (r *scheduleDataSource) Metadata(_ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
	resp.TypeName = req.ProviderTypeName + "_server_update_schedule"
}
|
||||||
|
|
||||||
|
// Configure adds the provider configured client to the data source.
// It validates the provider data, runs the one-time beta opt-in check, and
// builds the serverupdate API client (custom endpoint takes precedence over
// the configured region).
func (r *scheduleDataSource) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) {
	// Prevent panic if the provider has not been configured.
	if req.ProviderData == nil {
		return
	}

	providerData, ok := req.ProviderData.(core.ProviderData)
	if !ok {
		core.LogAndAddError(ctx, &resp.Diagnostics, "Error configuring API client", fmt.Sprintf("Expected configure type stackit.ProviderData, got %T", req.ProviderData))
		return
	}

	// Configure is called twice by the framework; the package-level flag
	// ensures the beta warning/error is only emitted once.
	if !scheduleDataSourceBetaCheckDone {
		features.CheckBetaResourcesEnabled(ctx, &providerData, &resp.Diagnostics, "stackit_server_update_schedule", "data source")
		if resp.Diagnostics.HasError() {
			return
		}
		scheduleDataSourceBetaCheckDone = true
	}

	var apiClient *serverupdate.APIClient
	var err error
	if providerData.ServerUpdateCustomEndpoint != "" {
		ctx = tflog.SetField(ctx, "server_update_custom_endpoint", providerData.ServerUpdateCustomEndpoint)
		apiClient, err = serverupdate.NewAPIClient(
			config.WithCustomAuth(providerData.RoundTripper),
			config.WithEndpoint(providerData.ServerUpdateCustomEndpoint),
		)
	} else {
		apiClient, err = serverupdate.NewAPIClient(
			config.WithCustomAuth(providerData.RoundTripper),
			config.WithRegion(providerData.Region),
		)
	}

	if err != nil {
		core.LogAndAddError(ctx, &resp.Diagnostics, "Error configuring API client", fmt.Sprintf("Configuring client: %v. This is an error related to the provider configuration, not to the data source configuration", err))
		return
	}

	r.client = apiClient
	tflog.Info(ctx, "Server update client configured")
}
|
||||||
|
|
||||||
|
// Schema defines the schema for the data source.
// project_id, server_id and update_schedule_id are required lookup keys; the
// remaining attributes are populated from the API response.
func (r *scheduleDataSource) Schema(_ context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) {
	resp.Schema = schema.Schema{
		Description:         "Server update schedule datasource schema. Must have a `region` specified in the provider configuration.",
		MarkdownDescription: features.AddBetaDescription("Server update schedule datasource schema. Must have a `region` specified in the provider configuration."),
		Attributes: map[string]schema.Attribute{
			"id": schema.StringAttribute{
				Description: "Terraform's internal resource identifier. It is structured as \"`project_id`,`server_id`,`update_schedule_id`\".",
				Computed:    true,
			},
			"name": schema.StringAttribute{
				Description: "The schedule name.",
				Computed:    true,
			},
			"update_schedule_id": schema.Int64Attribute{
				Description: "Update schedule ID.",
				Required:    true,
			},
			"project_id": schema.StringAttribute{
				Description: "STACKIT Project ID to which the server is associated.",
				Required:    true,
				Validators: []validator.String{
					validate.UUID(),
					// IDs are joined with a separator to build the internal
					// "id" attribute, so the separator must not appear here.
					validate.NoSeparator(),
				},
			},
			"server_id": schema.StringAttribute{
				Description: "Server ID for the update schedule.",
				Required:    true,
				Validators: []validator.String{
					validate.UUID(),
					validate.NoSeparator(),
				},
			},
			"rrule": schema.StringAttribute{
				Description: "Update schedule described in `rrule` (recurrence rule) format.",
				Computed:    true,
			},
			"enabled": schema.BoolAttribute{
				Description: "Is the update schedule enabled or disabled.",
				Computed:    true,
			},
			"maintenance_window": schema.Int64Attribute{
				Description: "Maintenance window [1..24].",
				Computed:    true,
			},
		},
	}
}
|
||||||
|
|
||||||
|
// Read refreshes the Terraform state with the latest data.
// It fetches the schedule identified by project_id/server_id/update_schedule_id
// and maps the response into state; a 404 removes the entry from state.
func (r *scheduleDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { // nolint:gocritic // function signature required by Terraform
	var model Model
	diags := req.Config.Get(ctx, &model)
	resp.Diagnostics.Append(diags...)
	if resp.Diagnostics.HasError() {
		return
	}
	projectId := model.ProjectId.ValueString()
	serverId := model.ServerId.ValueString()
	updateScheduleId := model.UpdateScheduleId.ValueInt64()
	ctx = tflog.SetField(ctx, "project_id", projectId)
	ctx = tflog.SetField(ctx, "server_id", serverId)
	ctx = tflog.SetField(ctx, "update_schedule_id", updateScheduleId)

	// The API path parameter is a string, so the int64 id is formatted here.
	scheduleResp, err := r.client.GetUpdateSchedule(ctx, projectId, serverId, strconv.FormatInt(updateScheduleId, 10)).Execute()
	if err != nil {
		oapiErr, ok := err.(*oapierror.GenericOpenAPIError) //nolint:errorlint //complaining that error.As should be used to catch wrapped errors, but this error should not be wrapped
		if ok && oapiErr.StatusCode == http.StatusNotFound {
			resp.State.RemoveResource(ctx)
		}
		core.LogAndAddError(ctx, &resp.Diagnostics, "Error reading server update schedule", fmt.Sprintf("Calling API: %v", err))
		return
	}

	// Map response body to schema
	err = mapFields(scheduleResp, &model)
	if err != nil {
		core.LogAndAddError(ctx, &resp.Diagnostics, "Error reading server update schedule", fmt.Sprintf("Processing API payload: %v", err))
		return
	}

	// Set refreshed state
	diags = resp.State.Set(ctx, model)
	resp.Diagnostics.Append(diags...)
	if resp.Diagnostics.HasError() {
		return
	}
	tflog.Info(ctx, "Server update schedule read")
}
|
||||||
|
|
@ -0,0 +1,233 @@
|
||||||
|
package schedule
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"fmt"
|
||||||
|
"net/http"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/hashicorp/terraform-plugin-framework/datasource"
|
||||||
|
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
|
||||||
|
"github.com/hashicorp/terraform-plugin-framework/types"
|
||||||
|
"github.com/hashicorp/terraform-plugin-log/tflog"
|
||||||
|
"github.com/stackitcloud/terraform-provider-stackit/stackit/internal/core"
|
||||||
|
"github.com/stackitcloud/terraform-provider-stackit/stackit/internal/features"
|
||||||
|
"github.com/stackitcloud/terraform-provider-stackit/stackit/internal/validate"
|
||||||
|
|
||||||
|
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
|
||||||
|
"github.com/stackitcloud/stackit-sdk-go/core/config"
|
||||||
|
"github.com/stackitcloud/stackit-sdk-go/core/oapierror"
|
||||||
|
"github.com/stackitcloud/stackit-sdk-go/services/serverupdate"
|
||||||
|
)
|
||||||
|
|
||||||
|
// schedulesDataSourceBetaCheckDone is used to prevent multiple checks for beta resources.
// This is a workaround for the lack of a global state in the provider and
// needs to exist because the Configure method is called twice.
var schedulesDataSourceBetaCheckDone bool

// Ensure the implementation satisfies the expected interfaces.
var (
	_ datasource.DataSource = &schedulesDataSource{}
)
|
||||||
|
|
||||||
|
// NewSchedulesDataSource is a helper function to simplify the provider implementation.
// The returned value is registered with the provider's data source list.
func NewSchedulesDataSource() datasource.DataSource {
	return &schedulesDataSource{}
}
|
||||||
|
|
||||||
|
// schedulesDataSource is the data source implementation.
type schedulesDataSource struct {
	// client is the server update API client, set in Configure.
	client *serverupdate.APIClient
}
|
||||||
|
|
||||||
|
// Metadata returns the data source type name.
// The full type name is "<provider>_server_update_schedules".
func (r *schedulesDataSource) Metadata(_ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
	resp.TypeName = req.ProviderTypeName + "_server_update_schedules"
}
|
||||||
|
|
||||||
|
// Configure adds the provider configured client to the data source.
// It validates the provider data, runs the one-time beta opt-in check, and
// builds the serverupdate API client (custom endpoint takes precedence over
// the configured region).
func (r *schedulesDataSource) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) {
	// Prevent panic if the provider has not been configured.
	if req.ProviderData == nil {
		return
	}

	providerData, ok := req.ProviderData.(core.ProviderData)
	if !ok {
		core.LogAndAddError(ctx, &resp.Diagnostics, "Error configuring API client", fmt.Sprintf("Expected configure type stackit.ProviderData, got %T", req.ProviderData))
		return
	}

	// Configure is called twice by the framework; the package-level flag
	// ensures the beta warning/error is only emitted once.
	if !schedulesDataSourceBetaCheckDone {
		features.CheckBetaResourcesEnabled(ctx, &providerData, &resp.Diagnostics, "stackit_server_update_schedules", "data source")
		if resp.Diagnostics.HasError() {
			return
		}
		schedulesDataSourceBetaCheckDone = true
	}

	var apiClient *serverupdate.APIClient
	var err error
	if providerData.ServerUpdateCustomEndpoint != "" {
		ctx = tflog.SetField(ctx, "server_update_custom_endpoint", providerData.ServerUpdateCustomEndpoint)
		apiClient, err = serverupdate.NewAPIClient(
			config.WithCustomAuth(providerData.RoundTripper),
			config.WithEndpoint(providerData.ServerUpdateCustomEndpoint),
		)
	} else {
		apiClient, err = serverupdate.NewAPIClient(
			config.WithCustomAuth(providerData.RoundTripper),
			config.WithRegion(providerData.Region),
		)
	}

	if err != nil {
		core.LogAndAddError(ctx, &resp.Diagnostics, "Error configuring API client", fmt.Sprintf("Configuring client: %v. This is an error related to the provider configuration, not to the data source configuration", err))
		return
	}

	r.client = apiClient
	tflog.Info(ctx, "Server update client configured")
}
|
||||||
|
|
||||||
|
// Schema defines the schema for the data source.
// project_id and server_id are required lookup keys; "items" holds the list
// of schedules returned by the API.
func (r *schedulesDataSource) Schema(_ context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) {
	resp.Schema = schema.Schema{
		Description:         "Server update schedules datasource schema. Must have a `region` specified in the provider configuration.",
		MarkdownDescription: features.AddBetaDescription("Server update schedules datasource schema. Must have a `region` specified in the provider configuration."),
		Attributes: map[string]schema.Attribute{
			"id": schema.StringAttribute{
				Description: "Terraform's internal data source identifier. It is structured as \"`project_id`,`server_id`\".",
				Computed:    true,
			},
			"project_id": schema.StringAttribute{
				Description: "STACKIT Project ID (UUID) to which the server is associated.",
				Required:    true,
				Validators: []validator.String{
					validate.UUID(),
					// IDs are joined with a separator to build the internal
					// "id" attribute, so the separator must not appear here.
					validate.NoSeparator(),
				},
			},
			"server_id": schema.StringAttribute{
				Description: "Server ID (UUID) to which the update schedule is associated.",
				Required:    true,
				Validators: []validator.String{
					validate.UUID(),
					validate.NoSeparator(),
				},
			},
			"items": schema.ListNestedAttribute{
				Computed: true,
				NestedObject: schema.NestedAttributeObject{
					Attributes: map[string]schema.Attribute{
						"update_schedule_id": schema.Int64Attribute{
							Computed: true,
						},
						"name": schema.StringAttribute{
							Description: "The update schedule name.",
							Computed:    true,
						},
						"rrule": schema.StringAttribute{
							Description: "Update schedule described in `rrule` (recurrence rule) format.",
							Computed:    true,
						},
						"enabled": schema.BoolAttribute{
							Description: "Is the update schedule enabled or disabled.",
							Computed:    true,
						},
						"maintenance_window": schema.Int64Attribute{
							Description: "Maintenance window [1..24].",
							Computed:    true,
						},
					},
				},
			},
		},
	}
}
|
||||||
|
|
||||||
|
// schedulesDataSourceModel maps the data source schema data.
type schedulesDataSourceModel struct {
	ID        types.String                   `tfsdk:"id"`         // "<project_id>,<server_id>"
	ProjectId types.String                   `tfsdk:"project_id"` // lookup key
	ServerId  types.String                   `tfsdk:"server_id"`  // lookup key
	Items     []schedulesDatasourceItemModel `tfsdk:"items"`      // schedules returned by the API
}
|
||||||
|
|
||||||
|
// schedulesDatasourceItemModel maps schedule schema data.
// One entry per update schedule returned by the list API.
type schedulesDatasourceItemModel struct {
	UpdateScheduleId  types.Int64  `tfsdk:"update_schedule_id"`
	Name              types.String `tfsdk:"name"`
	Rrule             types.String `tfsdk:"rrule"`
	Enabled           types.Bool   `tfsdk:"enabled"`
	MaintenanceWindow types.Int64  `tfsdk:"maintenance_window"`
}
|
||||||
|
|
||||||
|
// Read refreshes the Terraform state with the latest data.
// It lists all update schedules of the server and maps them into state;
// a 404 removes the entry from state.
func (r *schedulesDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { // nolint:gocritic // function signature required by Terraform
	var model schedulesDataSourceModel
	diags := req.Config.Get(ctx, &model)
	resp.Diagnostics.Append(diags...)
	if resp.Diagnostics.HasError() {
		return
	}
	projectId := model.ProjectId.ValueString()
	serverId := model.ServerId.ValueString()
	ctx = tflog.SetField(ctx, "project_id", projectId)
	ctx = tflog.SetField(ctx, "server_id", serverId)

	schedules, err := r.client.ListUpdateSchedules(ctx, projectId, serverId).Execute()
	if err != nil {
		oapiErr, ok := err.(*oapierror.GenericOpenAPIError) //nolint:errorlint //complaining that error.As should be used to catch wrapped errors, but this error should not be wrapped
		if ok && oapiErr.StatusCode == http.StatusNotFound {
			resp.State.RemoveResource(ctx)
		}
		core.LogAndAddError(ctx, &resp.Diagnostics, "Error reading server update schedules", fmt.Sprintf("Calling API: %v", err))
		return
	}

	// Map response body to schema
	err = mapSchedulesDatasourceFields(ctx, schedules, &model)
	if err != nil {
		core.LogAndAddError(ctx, &resp.Diagnostics, "Error reading server update schedules", fmt.Sprintf("Processing API payload: %v", err))
		return
	}

	// Set refreshed state
	diags = resp.State.Set(ctx, model)
	resp.Diagnostics.Append(diags...)
	if resp.Diagnostics.HasError() {
		return
	}
	tflog.Info(ctx, "Server update schedules read")
}
|
||||||
|
|
||||||
|
func mapSchedulesDatasourceFields(ctx context.Context, schedules *serverupdate.GetUpdateSchedulesResponse, model *schedulesDataSourceModel) error {
|
||||||
|
if schedules == nil {
|
||||||
|
return fmt.Errorf("response input is nil")
|
||||||
|
}
|
||||||
|
if model == nil {
|
||||||
|
return fmt.Errorf("model input is nil")
|
||||||
|
}
|
||||||
|
|
||||||
|
tflog.Debug(ctx, "response", map[string]any{"schedules": schedules})
|
||||||
|
projectId := model.ProjectId.ValueString()
|
||||||
|
serverId := model.ServerId.ValueString()
|
||||||
|
|
||||||
|
idParts := []string{projectId, serverId}
|
||||||
|
model.ID = types.StringValue(
|
||||||
|
strings.Join(idParts, core.Separator),
|
||||||
|
)
|
||||||
|
|
||||||
|
for _, schedule := range *schedules.Items {
|
||||||
|
scheduleState := schedulesDatasourceItemModel{
|
||||||
|
UpdateScheduleId: types.Int64Value(*schedule.Id),
|
||||||
|
Name: types.StringValue(*schedule.Name),
|
||||||
|
Rrule: types.StringValue(*schedule.Rrule),
|
||||||
|
Enabled: types.BoolValue(*schedule.Enabled),
|
||||||
|
MaintenanceWindow: types.Int64Value(*schedule.MaintenanceWindow),
|
||||||
|
}
|
||||||
|
model.Items = append(model.Items, scheduleState)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
@ -0,0 +1,91 @@
|
||||||
|
package schedule
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/google/go-cmp/cmp"
|
||||||
|
"github.com/hashicorp/terraform-plugin-framework/types"
|
||||||
|
"github.com/stackitcloud/stackit-sdk-go/core/utils"
|
||||||
|
sdk "github.com/stackitcloud/stackit-sdk-go/services/serverupdate"
|
||||||
|
)
|
||||||
|
|
||||||
|
// TestMapSchedulesDataSourceFields verifies that the list-response mapper
// builds the composite ID from project/server ids, maps each item, leaves
// Items nil for an empty response, and rejects a nil response.
func TestMapSchedulesDataSourceFields(t *testing.T) {
	tests := []struct {
		description string
		input       *sdk.GetUpdateSchedulesResponse
		expected    schedulesDataSourceModel
		isValid     bool
	}{
		{
			"empty response",
			&sdk.GetUpdateSchedulesResponse{
				Items: &[]sdk.UpdateSchedule{},
			},
			schedulesDataSourceModel{
				ID:        types.StringValue("project_uid,server_uid"),
				ProjectId: types.StringValue("project_uid"),
				ServerId:  types.StringValue("server_uid"),
				Items:     nil,
			},
			true,
		},
		{
			"simple_values",
			&sdk.GetUpdateSchedulesResponse{
				Items: &[]sdk.UpdateSchedule{
					{
						Id:                utils.Ptr(int64(5)),
						Enabled:           utils.Ptr(true),
						Name:              utils.Ptr("update_schedule_name_1"),
						Rrule:             utils.Ptr("DTSTART;TZID=Europe/Sofia:20200803T023000 RRULE:FREQ=DAILY;INTERVAL=1"),
						MaintenanceWindow: utils.Ptr(int64(1)),
					},
				},
			},
			schedulesDataSourceModel{
				ID:        types.StringValue("project_uid,server_uid"),
				ServerId:  types.StringValue("server_uid"),
				ProjectId: types.StringValue("project_uid"),
				Items: []schedulesDatasourceItemModel{
					{
						UpdateScheduleId:  types.Int64Value(5),
						Name:              types.StringValue("update_schedule_name_1"),
						Rrule:             types.StringValue("DTSTART;TZID=Europe/Sofia:20200803T023000 RRULE:FREQ=DAILY;INTERVAL=1"),
						Enabled:           types.BoolValue(true),
						MaintenanceWindow: types.Int64Value(1),
					},
				},
			},
			true,
		},
		{
			// nil response must produce an error, not a panic.
			"nil_response",
			nil,
			schedulesDataSourceModel{},
			false,
		},
	}
	for _, tt := range tests {
		t.Run(tt.description, func(t *testing.T) {
			// ProjectId/ServerId are inputs to the mapper (used for the ID),
			// so they are pre-seeded from the expected model.
			state := &schedulesDataSourceModel{
				ProjectId: tt.expected.ProjectId,
				ServerId:  tt.expected.ServerId,
			}
			ctx := context.TODO()
			err := mapSchedulesDatasourceFields(ctx, tt.input, state)
			if !tt.isValid && err == nil {
				t.Fatalf("Should have failed")
			}
			if tt.isValid && err != nil {
				t.Fatalf("Should not have failed: %v", err)
			}
			if tt.isValid {
				diff := cmp.Diff(state, &tt.expected)
				if diff != "" {
					t.Fatalf("Data does not match: %s", diff)
				}
			}
		})
	}
}
|
||||||
198
stackit/internal/services/serverupdate/serverupdate_acc_test.go
Normal file
198
stackit/internal/services/serverupdate/serverupdate_acc_test.go
Normal file
|
|
@ -0,0 +1,198 @@
|
||||||
|
package serverupdate_test
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"fmt"
|
||||||
|
"regexp"
|
||||||
|
"strconv"
|
||||||
|
"strings"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/hashicorp/terraform-plugin-testing/helper/resource"
|
||||||
|
"github.com/hashicorp/terraform-plugin-testing/terraform"
|
||||||
|
"github.com/stackitcloud/stackit-sdk-go/core/config"
|
||||||
|
"github.com/stackitcloud/stackit-sdk-go/core/utils"
|
||||||
|
"github.com/stackitcloud/stackit-sdk-go/services/serverupdate"
|
||||||
|
"github.com/stackitcloud/terraform-provider-stackit/stackit/internal/core"
|
||||||
|
"github.com/stackitcloud/terraform-provider-stackit/stackit/internal/testutil"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Server update schedule resource data
//
// project_id and server_id come from the acceptance-test environment
// (TF_ACC_* variables via testutil); the remaining entries are the attribute
// values rendered into the Terraform config and asserted in the checks.
var serverUpdateScheduleResource = map[string]string{
	"project_id": testutil.ProjectId,
	"server_id":  testutil.ServerId,
	// Unique per test run to avoid collisions between concurrent runs.
	"name": testutil.ResourceNameWithDateTime("server-update-schedule"),
	// Daily recurrence starting 2020-08-03 02:30 Europe/Sofia time.
	"rrule": "DTSTART;TZID=Europe/Sofia:20200803T023000 RRULE:FREQ=DAILY;INTERVAL=1",
	// NOTE(review): this entry appears unused by the checks below; the
	// maintenance window is passed to resourceConfig as an int64 instead.
	"maintenance_window": "1",
}
|
||||||
|
|
||||||
|
func resourceConfig(maintenanceWindow int64) string {
|
||||||
|
return fmt.Sprintf(`
|
||||||
|
%s
|
||||||
|
|
||||||
|
resource "stackit_server_update_schedule" "test_schedule" {
|
||||||
|
project_id = "%s"
|
||||||
|
server_id = "%s"
|
||||||
|
name = "%s"
|
||||||
|
rrule = "%s"
|
||||||
|
enabled = true
|
||||||
|
maintenance_window = %d
|
||||||
|
}
|
||||||
|
`,
|
||||||
|
testutil.ServerUpdateProviderConfig(),
|
||||||
|
serverUpdateScheduleResource["project_id"],
|
||||||
|
serverUpdateScheduleResource["server_id"],
|
||||||
|
serverUpdateScheduleResource["name"],
|
||||||
|
serverUpdateScheduleResource["rrule"],
|
||||||
|
maintenanceWindow,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestAccServerUpdateScheduleResource(t *testing.T) {
|
||||||
|
if testutil.ServerId == "" {
|
||||||
|
fmt.Println("TF_ACC_SERVER_ID not set, skipping test")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
var invalidMaintenanceWindow int64 = 0
|
||||||
|
var validMaintenanceWindow int64 = 15
|
||||||
|
var updatedMaintenanceWindow int64 = 8
|
||||||
|
resource.Test(t, resource.TestCase{
|
||||||
|
ProtoV6ProviderFactories: testutil.TestAccProtoV6ProviderFactories,
|
||||||
|
CheckDestroy: testAccCheckServerUpdateScheduleDestroy,
|
||||||
|
Steps: []resource.TestStep{
|
||||||
|
// Creation fail
|
||||||
|
{
|
||||||
|
Config: resourceConfig(invalidMaintenanceWindow),
|
||||||
|
ExpectError: regexp.MustCompile(`.*maintenance_window value must be at least 1*`),
|
||||||
|
},
|
||||||
|
// Creation
|
||||||
|
{
|
||||||
|
Config: resourceConfig(validMaintenanceWindow),
|
||||||
|
Check: resource.ComposeAggregateTestCheckFunc(
|
||||||
|
// Update schedule data
|
||||||
|
resource.TestCheckResourceAttr("stackit_server_update_schedule.test_schedule", "project_id", serverUpdateScheduleResource["project_id"]),
|
||||||
|
resource.TestCheckResourceAttr("stackit_server_update_schedule.test_schedule", "server_id", serverUpdateScheduleResource["server_id"]),
|
||||||
|
resource.TestCheckResourceAttrSet("stackit_server_update_schedule.test_schedule", "update_schedule_id"),
|
||||||
|
resource.TestCheckResourceAttrSet("stackit_server_update_schedule.test_schedule", "id"),
|
||||||
|
resource.TestCheckResourceAttr("stackit_server_update_schedule.test_schedule", "name", serverUpdateScheduleResource["name"]),
|
||||||
|
resource.TestCheckResourceAttr("stackit_server_update_schedule.test_schedule", "rrule", serverUpdateScheduleResource["rrule"]),
|
||||||
|
resource.TestCheckResourceAttr("stackit_server_update_schedule.test_schedule", "enabled", strconv.FormatBool(true)),
|
||||||
|
),
|
||||||
|
},
|
||||||
|
// data source
|
||||||
|
{
|
||||||
|
Config: fmt.Sprintf(`
|
||||||
|
%s
|
||||||
|
|
||||||
|
data "stackit_server_update_schedules" "schedules_data_test" {
|
||||||
|
project_id = stackit_server_update_schedule.test_schedule.project_id
|
||||||
|
server_id = stackit_server_update_schedule.test_schedule.server_id
|
||||||
|
}
|
||||||
|
|
||||||
|
data "stackit_server_update_schedule" "schedule_data_test" {
|
||||||
|
project_id = stackit_server_update_schedule.test_schedule.project_id
|
||||||
|
server_id = stackit_server_update_schedule.test_schedule.server_id
|
||||||
|
update_schedule_id = stackit_server_update_schedule.test_schedule.update_schedule_id
|
||||||
|
}`,
|
||||||
|
resourceConfig(validMaintenanceWindow),
|
||||||
|
),
|
||||||
|
Check: resource.ComposeAggregateTestCheckFunc(
|
||||||
|
// Server update schedule data
|
||||||
|
resource.TestCheckResourceAttr("data.stackit_server_update_schedule.schedule_data_test", "project_id", serverUpdateScheduleResource["project_id"]),
|
||||||
|
resource.TestCheckResourceAttr("data.stackit_server_update_schedule.schedule_data_test", "server_id", serverUpdateScheduleResource["server_id"]),
|
||||||
|
resource.TestCheckResourceAttrSet("data.stackit_server_update_schedule.schedule_data_test", "update_schedule_id"),
|
||||||
|
resource.TestCheckResourceAttrSet("data.stackit_server_update_schedule.schedule_data_test", "id"),
|
||||||
|
resource.TestCheckResourceAttr("data.stackit_server_update_schedule.schedule_data_test", "name", serverUpdateScheduleResource["name"]),
|
||||||
|
resource.TestCheckResourceAttr("data.stackit_server_update_schedule.schedule_data_test", "rrule", serverUpdateScheduleResource["rrule"]),
|
||||||
|
resource.TestCheckResourceAttr("data.stackit_server_update_schedule.schedule_data_test", "enabled", strconv.FormatBool(true)),
|
||||||
|
|
||||||
|
// Server update schedules data
|
||||||
|
resource.TestCheckResourceAttr("data.stackit_server_update_schedules.schedules_data_test", "project_id", serverUpdateScheduleResource["project_id"]),
|
||||||
|
resource.TestCheckResourceAttr("data.stackit_server_update_schedules.schedules_data_test", "server_id", serverUpdateScheduleResource["server_id"]),
|
||||||
|
resource.TestCheckResourceAttrSet("data.stackit_server_update_schedules.schedules_data_test", "id"),
|
||||||
|
),
|
||||||
|
},
|
||||||
|
// Import
|
||||||
|
{
|
||||||
|
ResourceName: "stackit_server_update_schedule.test_schedule",
|
||||||
|
ImportStateIdFunc: func(s *terraform.State) (string, error) {
|
||||||
|
r, ok := s.RootModule().Resources["stackit_server_update_schedule.test_schedule"]
|
||||||
|
if !ok {
|
||||||
|
return "", fmt.Errorf("couldn't find resource stackit_server_update_schedule.test_schedule")
|
||||||
|
}
|
||||||
|
scheduleId, ok := r.Primary.Attributes["update_schedule_id"]
|
||||||
|
if !ok {
|
||||||
|
return "", fmt.Errorf("couldn't find attribute update_schedule_id")
|
||||||
|
}
|
||||||
|
return fmt.Sprintf("%s,%s,%s", testutil.ProjectId, testutil.ServerId, scheduleId), nil
|
||||||
|
},
|
||||||
|
ImportState: true,
|
||||||
|
ImportStateVerify: true,
|
||||||
|
},
|
||||||
|
// Update
|
||||||
|
{
|
||||||
|
Config: resourceConfig(updatedMaintenanceWindow),
|
||||||
|
Check: resource.ComposeAggregateTestCheckFunc(
|
||||||
|
// Update schedule data
|
||||||
|
resource.TestCheckResourceAttr("stackit_server_update_schedule.test_schedule", "project_id", serverUpdateScheduleResource["project_id"]),
|
||||||
|
resource.TestCheckResourceAttr("stackit_server_update_schedule.test_schedule", "server_id", serverUpdateScheduleResource["server_id"]),
|
||||||
|
resource.TestCheckResourceAttrSet("stackit_server_update_schedule.test_schedule", "update_schedule_id"),
|
||||||
|
resource.TestCheckResourceAttrSet("stackit_server_update_schedule.test_schedule", "id"),
|
||||||
|
resource.TestCheckResourceAttr("stackit_server_update_schedule.test_schedule", "name", serverUpdateScheduleResource["name"]),
|
||||||
|
resource.TestCheckResourceAttr("stackit_server_update_schedule.test_schedule", "rrule", serverUpdateScheduleResource["rrule"]),
|
||||||
|
resource.TestCheckResourceAttr("stackit_server_update_schedule.test_schedule", "enabled", strconv.FormatBool(true)),
|
||||||
|
resource.TestCheckResourceAttr("stackit_server_update_schedule.test_schedule", "maintenance_window", strconv.FormatInt(8, 10)),
|
||||||
|
),
|
||||||
|
},
|
||||||
|
// Deletion is done by the framework implicitly
|
||||||
|
},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func testAccCheckServerUpdateScheduleDestroy(s *terraform.State) error {
|
||||||
|
ctx := context.Background()
|
||||||
|
var client *serverupdate.APIClient
|
||||||
|
var err error
|
||||||
|
if testutil.ServerUpdateCustomEndpoint == "" {
|
||||||
|
client, err = serverupdate.NewAPIClient(
|
||||||
|
config.WithRegion("eu01"),
|
||||||
|
)
|
||||||
|
} else {
|
||||||
|
client, err = serverupdate.NewAPIClient(
|
||||||
|
config.WithEndpoint(testutil.ServerUpdateCustomEndpoint),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("creating client: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
schedulesToDestroy := []string{}
|
||||||
|
for _, rs := range s.RootModule().Resources {
|
||||||
|
if rs.Type != "stackit_server_update_schedule" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
// server update schedule terraform ID: "[project_id],[server_id],[update_schedule_id]"
|
||||||
|
scheduleId := strings.Split(rs.Primary.ID, core.Separator)[2]
|
||||||
|
schedulesToDestroy = append(schedulesToDestroy, scheduleId)
|
||||||
|
}
|
||||||
|
|
||||||
|
schedulesResp, err := client.ListUpdateSchedules(ctx, testutil.ProjectId, testutil.ServerId).Execute()
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("getting schedulesResp: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
schedules := *schedulesResp.Items
|
||||||
|
for i := range schedules {
|
||||||
|
if schedules[i].Id == nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
scheduleId := strconv.FormatInt(*schedules[i].Id, 10)
|
||||||
|
if utils.Contains(schedulesToDestroy, scheduleId) {
|
||||||
|
err := client.DeleteUpdateScheduleExecute(ctx, testutil.ProjectId, testutil.ServerId, scheduleId)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("destroying server update schedule %s during CheckDestroy: %w", scheduleId, err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
@ -65,6 +65,7 @@ var (
|
||||||
SecretsManagerCustomEndpoint = os.Getenv("TF_ACC_SECRETSMANAGER_CUSTOM_ENDPOINT")
|
SecretsManagerCustomEndpoint = os.Getenv("TF_ACC_SECRETSMANAGER_CUSTOM_ENDPOINT")
|
||||||
SQLServerFlexCustomEndpoint = os.Getenv("TF_ACC_SQLSERVERFLEX_CUSTOM_ENDPOINT")
|
SQLServerFlexCustomEndpoint = os.Getenv("TF_ACC_SQLSERVERFLEX_CUSTOM_ENDPOINT")
|
||||||
ServerBackupCustomEndpoint = os.Getenv("TF_ACC_SERVER_BACKUP_CUSTOM_ENDPOINT")
|
ServerBackupCustomEndpoint = os.Getenv("TF_ACC_SERVER_BACKUP_CUSTOM_ENDPOINT")
|
||||||
|
ServerUpdateCustomEndpoint = os.Getenv("TF_ACC_SERVER_UPDATE_CUSTOM_ENDPOINT")
|
||||||
SKECustomEndpoint = os.Getenv("TF_ACC_SKE_CUSTOM_ENDPOINT")
|
SKECustomEndpoint = os.Getenv("TF_ACC_SKE_CUSTOM_ENDPOINT")
|
||||||
|
|
||||||
// OpenStack user domain name
|
// OpenStack user domain name
|
||||||
|
|
@ -341,6 +342,21 @@ func ServerBackupProviderConfig() string {
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func ServerUpdateProviderConfig() string {
|
||||||
|
if ServerUpdateCustomEndpoint == "" {
|
||||||
|
return `
|
||||||
|
provider "stackit" {
|
||||||
|
region = "eu01"
|
||||||
|
}`
|
||||||
|
}
|
||||||
|
return fmt.Sprintf(`
|
||||||
|
provider "stackit" {
|
||||||
|
server_update_custom_endpoint = "%s"
|
||||||
|
}`,
|
||||||
|
ServerUpdateCustomEndpoint,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
func SKEProviderConfig() string {
|
func SKEProviderConfig() string {
|
||||||
if SKECustomEndpoint == "" {
|
if SKECustomEndpoint == "" {
|
||||||
return `
|
return `
|
||||||
|
|
|
||||||
|
|
@ -56,6 +56,7 @@ import (
|
||||||
secretsManagerInstance "github.com/stackitcloud/terraform-provider-stackit/stackit/internal/services/secretsmanager/instance"
|
secretsManagerInstance "github.com/stackitcloud/terraform-provider-stackit/stackit/internal/services/secretsmanager/instance"
|
||||||
secretsManagerUser "github.com/stackitcloud/terraform-provider-stackit/stackit/internal/services/secretsmanager/user"
|
secretsManagerUser "github.com/stackitcloud/terraform-provider-stackit/stackit/internal/services/secretsmanager/user"
|
||||||
serverBackupSchedule "github.com/stackitcloud/terraform-provider-stackit/stackit/internal/services/serverbackup/schedule"
|
serverBackupSchedule "github.com/stackitcloud/terraform-provider-stackit/stackit/internal/services/serverbackup/schedule"
|
||||||
|
serverUpdateSchedule "github.com/stackitcloud/terraform-provider-stackit/stackit/internal/services/serverupdate/schedule"
|
||||||
skeCluster "github.com/stackitcloud/terraform-provider-stackit/stackit/internal/services/ske/cluster"
|
skeCluster "github.com/stackitcloud/terraform-provider-stackit/stackit/internal/services/ske/cluster"
|
||||||
skeKubeconfig "github.com/stackitcloud/terraform-provider-stackit/stackit/internal/services/ske/kubeconfig"
|
skeKubeconfig "github.com/stackitcloud/terraform-provider-stackit/stackit/internal/services/ske/kubeconfig"
|
||||||
skeProject "github.com/stackitcloud/terraform-provider-stackit/stackit/internal/services/ske/project"
|
skeProject "github.com/stackitcloud/terraform-provider-stackit/stackit/internal/services/ske/project"
|
||||||
|
|
@ -119,6 +120,7 @@ type providerModel struct {
|
||||||
SQLServerFlexCustomEndpoint types.String `tfsdk:"sqlserverflex_custom_endpoint"`
|
SQLServerFlexCustomEndpoint types.String `tfsdk:"sqlserverflex_custom_endpoint"`
|
||||||
SKECustomEndpoint types.String `tfsdk:"ske_custom_endpoint"`
|
SKECustomEndpoint types.String `tfsdk:"ske_custom_endpoint"`
|
||||||
ServerBackupCustomEndpoint types.String `tfsdk:"server_backup_custom_endpoint"`
|
ServerBackupCustomEndpoint types.String `tfsdk:"server_backup_custom_endpoint"`
|
||||||
|
ServerUpdateCustomEndpoint types.String `tfsdk:"server_update_custom_endpoint"`
|
||||||
ResourceManagerCustomEndpoint types.String `tfsdk:"resourcemanager_custom_endpoint"`
|
ResourceManagerCustomEndpoint types.String `tfsdk:"resourcemanager_custom_endpoint"`
|
||||||
TokenCustomEndpoint types.String `tfsdk:"token_custom_endpoint"`
|
TokenCustomEndpoint types.String `tfsdk:"token_custom_endpoint"`
|
||||||
EnableBetaResources types.Bool `tfsdk:"enable_beta_resources"`
|
EnableBetaResources types.Bool `tfsdk:"enable_beta_resources"`
|
||||||
|
|
@ -151,6 +153,7 @@ func (p *Provider) Schema(_ context.Context, _ provider.SchemaRequest, resp *pro
|
||||||
"postgresflex_custom_endpoint": "Custom endpoint for the PostgresFlex service",
|
"postgresflex_custom_endpoint": "Custom endpoint for the PostgresFlex service",
|
||||||
"redis_custom_endpoint": "Custom endpoint for the Redis service",
|
"redis_custom_endpoint": "Custom endpoint for the Redis service",
|
||||||
"server_backup_custom_endpoint": "Custom endpoint for the Server Backup service",
|
"server_backup_custom_endpoint": "Custom endpoint for the Server Backup service",
|
||||||
|
"server_update_custom_endpoint": "Custom endpoint for the Server Update service",
|
||||||
"resourcemanager_custom_endpoint": "Custom endpoint for the Resource Manager service",
|
"resourcemanager_custom_endpoint": "Custom endpoint for the Resource Manager service",
|
||||||
"secretsmanager_custom_endpoint": "Custom endpoint for the Secrets Manager service",
|
"secretsmanager_custom_endpoint": "Custom endpoint for the Secrets Manager service",
|
||||||
"sqlserverflex_custom_endpoint": "Custom endpoint for the SQL Server Flex service",
|
"sqlserverflex_custom_endpoint": "Custom endpoint for the SQL Server Flex service",
|
||||||
|
|
@ -271,6 +274,10 @@ func (p *Provider) Schema(_ context.Context, _ provider.SchemaRequest, resp *pro
|
||||||
Optional: true,
|
Optional: true,
|
||||||
Description: descriptions["server_backup_custom_endpoint"],
|
Description: descriptions["server_backup_custom_endpoint"],
|
||||||
},
|
},
|
||||||
|
"server_update_custom_endpoint": schema.StringAttribute{
|
||||||
|
Optional: true,
|
||||||
|
Description: descriptions["server_update_custom_endpoint"],
|
||||||
|
},
|
||||||
"service_enablement_custom_endpoint": schema.StringAttribute{
|
"service_enablement_custom_endpoint": schema.StringAttribute{
|
||||||
Optional: true,
|
Optional: true,
|
||||||
Description: descriptions["service_enablement_custom_endpoint"],
|
Description: descriptions["service_enablement_custom_endpoint"],
|
||||||
|
|
@ -446,6 +453,8 @@ func (p *Provider) DataSources(_ context.Context) []func() datasource.DataSource
|
||||||
sqlServerFlexUser.NewUserDataSource,
|
sqlServerFlexUser.NewUserDataSource,
|
||||||
serverBackupSchedule.NewScheduleDataSource,
|
serverBackupSchedule.NewScheduleDataSource,
|
||||||
serverBackupSchedule.NewSchedulesDataSource,
|
serverBackupSchedule.NewSchedulesDataSource,
|
||||||
|
serverUpdateSchedule.NewScheduleDataSource,
|
||||||
|
serverUpdateSchedule.NewSchedulesDataSource,
|
||||||
skeProject.NewProjectDataSource,
|
skeProject.NewProjectDataSource,
|
||||||
skeCluster.NewClusterDataSource,
|
skeCluster.NewClusterDataSource,
|
||||||
}
|
}
|
||||||
|
|
@ -503,6 +512,7 @@ func (p *Provider) Resources(_ context.Context) []func() resource.Resource {
|
||||||
sqlServerFlexInstance.NewInstanceResource,
|
sqlServerFlexInstance.NewInstanceResource,
|
||||||
sqlServerFlexUser.NewUserResource,
|
sqlServerFlexUser.NewUserResource,
|
||||||
serverBackupSchedule.NewScheduleResource,
|
serverBackupSchedule.NewScheduleResource,
|
||||||
|
serverUpdateSchedule.NewScheduleResource,
|
||||||
skeProject.NewProjectResource,
|
skeProject.NewProjectResource,
|
||||||
skeCluster.NewClusterResource,
|
skeCluster.NewClusterResource,
|
||||||
skeKubeconfig.NewKubeconfigResource,
|
skeKubeconfig.NewKubeconfigResource,
|
||||||
|
|
|
||||||
Loading…
Add table
Add a link
Reference in a new issue