ref 635294: server update schedules (#607)

Signed-off-by: Adrian Nackov <adrian.nackov@mail.schwarz>
This commit is contained in:
a_nackov 2024-12-12 19:20:29 +02:00 committed by GitHub
parent 1e1507bd96
commit 100704c0f4
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
18 changed files with 1564 additions and 1 deletions

View file

@ -0,0 +1,452 @@
package schedule
import (
"context"
"fmt"
"net/http"
"strconv"
"strings"
"github.com/hashicorp/terraform-plugin-framework-validators/int64validator"
"github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/int64planmodifier"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-log/tflog"
"github.com/stackitcloud/terraform-provider-stackit/stackit/internal/conversion"
"github.com/stackitcloud/terraform-provider-stackit/stackit/internal/core"
"github.com/stackitcloud/terraform-provider-stackit/stackit/internal/features"
"github.com/stackitcloud/terraform-provider-stackit/stackit/internal/validate"
"github.com/stackitcloud/stackit-sdk-go/core/config"
"github.com/stackitcloud/stackit-sdk-go/core/oapierror"
"github.com/stackitcloud/stackit-sdk-go/services/serverupdate"
)
// resourceBetaCheckDone is used to prevent multiple checks for beta resources.
// This is a workaround for the lack of a global state in the provider and
// needs to exist because the Configure method is called twice.
// NOTE(review): read/written without synchronization — assumed safe because
// the framework invokes Configure sequentially; confirm before relying on it.
var resourceBetaCheckDone bool

// Ensure the implementation satisfies the expected interfaces.
var (
	_ resource.Resource                = &scheduleResource{}
	_ resource.ResourceWithConfigure   = &scheduleResource{}
	_ resource.ResourceWithImportState = &scheduleResource{}
)
// Model is the Terraform schema/state model for a server update schedule.
type Model struct {
	// ID is the synthetic Terraform identifier, assembled by mapFields as
	// "<project_id>,<server_id>,<update_schedule_id>".
	ID        types.String `tfsdk:"id"`
	ProjectId types.String `tfsdk:"project_id"` // STACKIT project UUID
	ServerId  types.String `tfsdk:"server_id"`  // server UUID
	// UpdateScheduleId is the numeric schedule ID assigned by the API.
	UpdateScheduleId types.Int64  `tfsdk:"update_schedule_id"`
	Name             types.String `tfsdk:"name"`
	Rrule            types.String `tfsdk:"rrule"` // recurrence rule string
	Enabled          types.Bool   `tfsdk:"enabled"`
	// MaintenanceWindow is the window in hours; schema restricts it to [1..24].
	MaintenanceWindow types.Int64 `tfsdk:"maintenance_window"`
}
// NewScheduleResource is a helper function to simplify the provider implementation.
func NewScheduleResource() resource.Resource {
	var r scheduleResource
	return &r
}
// scheduleResource is the resource implementation.
type scheduleResource struct {
	// client is the server-update API client; populated in Configure.
	client *serverupdate.APIClient
}
// Metadata returns the resource type name.
func (r *scheduleResource) Metadata(_ context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
	const suffix = "_server_update_schedule"
	resp.TypeName = req.ProviderTypeName + suffix
}
// Configure adds the provider configured client to the resource.
func (r *scheduleResource) Configure(ctx context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) {
	// Provider not configured yet; Configure will be called again later.
	if req.ProviderData == nil {
		return
	}
	pd, ok := req.ProviderData.(core.ProviderData)
	if !ok {
		core.LogAndAddError(ctx, &resp.Diagnostics, "Error configuring API client", fmt.Sprintf("Expected configure type stackit.ProviderData, got %T", req.ProviderData))
		return
	}
	// Run the beta-feature gate only once per provider lifetime.
	if !resourceBetaCheckDone {
		features.CheckBetaResourcesEnabled(ctx, &pd, &resp.Diagnostics, "stackit_server_update_schedule", "resource")
		if resp.Diagnostics.HasError() {
			return
		}
		resourceBetaCheckDone = true
	}
	// A custom endpoint overrides the region-derived default endpoint.
	var (
		client *serverupdate.APIClient
		err    error
	)
	switch {
	case pd.ServerUpdateCustomEndpoint != "":
		ctx = tflog.SetField(ctx, "server_update_custom_endpoint", pd.ServerUpdateCustomEndpoint)
		client, err = serverupdate.NewAPIClient(
			config.WithCustomAuth(pd.RoundTripper),
			config.WithEndpoint(pd.ServerUpdateCustomEndpoint),
		)
	default:
		client, err = serverupdate.NewAPIClient(
			config.WithCustomAuth(pd.RoundTripper),
			config.WithRegion(pd.Region),
		)
	}
	if err != nil {
		core.LogAndAddError(ctx, &resp.Diagnostics, "Error configuring API client", fmt.Sprintf("Configuring client: %v. This is an error related to the provider configuration, not to the resource configuration", err))
		return
	}
	r.client = client
	tflog.Info(ctx, "Server update client configured.")
}
// Schema defines the schema for the resource.
func (r *scheduleResource) Schema(_ context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) {
	resp.Schema = schema.Schema{
		Description:         "Server update schedule resource schema. Must have a `region` specified in the provider configuration.",
		MarkdownDescription: features.AddBetaDescription("Server update schedule resource schema. Must have a `region` specified in the provider configuration."),
		Attributes: map[string]schema.Attribute{
			// Synthetic ID assembled in mapFields from project_id, server_id
			// and update_schedule_id, joined by core.Separator.
			"id": schema.StringAttribute{
				Description: "Terraform's internal resource identifier. It is structured as \"`project_id`,`server_id`,`update_schedule_id`\".",
				Computed:    true,
				PlanModifiers: []planmodifier.String{
					stringplanmodifier.UseStateForUnknown(),
				},
			},
			// Any name change forces re-creation of the schedule.
			// NOTE(review): UseStateForUnknown on a Required (non-computed)
			// attribute appears to have no effect — confirm intent.
			"name": schema.StringAttribute{
				Description: "The schedule name.",
				Required:    true,
				PlanModifiers: []planmodifier.String{
					stringplanmodifier.RequiresReplace(),
					stringplanmodifier.UseStateForUnknown(),
				},
				Validators: []validator.String{
					stringvalidator.LengthBetween(1, 255),
				},
			},
			// Assigned by the API on create.
			// NOTE(review): validators on a Computed-only attribute are not
			// applied to user input — presumably harmless; verify.
			"update_schedule_id": schema.Int64Attribute{
				Description: "Update schedule ID.",
				Computed:    true,
				PlanModifiers: []planmodifier.Int64{
					int64planmodifier.UseStateForUnknown(),
				},
				Validators: []validator.Int64{
					int64validator.AtLeast(1),
				},
			},
			"project_id": schema.StringAttribute{
				Description: "STACKIT Project ID to which the server is associated.",
				Required:    true,
				PlanModifiers: []planmodifier.String{
					stringplanmodifier.RequiresReplace(),
					stringplanmodifier.UseStateForUnknown(),
				},
				Validators: []validator.String{
					validate.UUID(),
					// Must not contain the ID separator since it is part of
					// the composite "id" attribute.
					validate.NoSeparator(),
				},
			},
			"server_id": schema.StringAttribute{
				Description: "Server ID for the update schedule.",
				Required:    true,
				PlanModifiers: []planmodifier.String{
					stringplanmodifier.RequiresReplace(),
					stringplanmodifier.UseStateForUnknown(),
				},
				Validators: []validator.String{
					validate.UUID(),
					validate.NoSeparator(),
				},
			},
			// NOTE(review): RFC 5545 RRULEs may legitimately contain commas
			// (e.g. BYDAY=MO,TU); if validate.NoSeparator rejects commas it
			// would refuse valid rules — confirm against the validator.
			"rrule": schema.StringAttribute{
				Description: "Update schedule described in `rrule` (recurrence rule) format.",
				Required:    true,
				PlanModifiers: []planmodifier.String{
					stringplanmodifier.RequiresReplace(),
					stringplanmodifier.UseStateForUnknown(),
				},
				Validators: []validator.String{
					validate.Rrule(),
					validate.NoSeparator(),
				},
			},
			"enabled": schema.BoolAttribute{
				Description: "Is the update schedule enabled or disabled.",
				Required:    true,
			},
			// Window is expressed in hours, bounded to one day.
			"maintenance_window": schema.Int64Attribute{
				Description: "Maintenance window [1..24].",
				Required:    true,
				Validators: []validator.Int64{
					int64validator.AtLeast(1),
					int64validator.AtMost(24),
				},
			},
		},
	}
}
// Create creates the resource and sets the initial Terraform state.
//
// Flow: read the planned model, ensure the server update service is enabled
// for the server, create the schedule via the API, then map the API response
// into state.
func (r *scheduleResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) { // nolint:gocritic // function signature required by Terraform
	var model Model
	diags := req.Plan.Get(ctx, &model)
	resp.Diagnostics.Append(diags...)
	if resp.Diagnostics.HasError() {
		return
	}
	projectId := model.ProjectId.ValueString()
	serverId := model.ServerId.ValueString()
	ctx = tflog.SetField(ctx, "project_id", projectId)
	ctx = tflog.SetField(ctx, "server_id", serverId)
	// Enable updates if not already enabled (idempotent; see enableUpdatesService).
	err := enableUpdatesService(ctx, &model, r.client)
	if err != nil {
		core.LogAndAddError(ctx, &resp.Diagnostics, "Error creating server update schedule", fmt.Sprintf("Enabling server update project before creation: %v", err))
		return
	}
	// Create new schedule
	payload, err := toCreatePayload(&model)
	if err != nil {
		core.LogAndAddError(ctx, &resp.Diagnostics, "Error creating server update schedule", fmt.Sprintf("Creating API payload: %v", err))
		return
	}
	scheduleResp, err := r.client.CreateUpdateSchedule(ctx, projectId, serverId).CreateUpdateSchedulePayload(*payload).Execute()
	if err != nil {
		core.LogAndAddError(ctx, &resp.Diagnostics, "Error creating server update schedule", fmt.Sprintf("Calling API: %v", err))
		return
	}
	// BUG FIX: the original dereferenced scheduleResp.Id for logging before
	// mapFields had validated the response, panicking on a response with a
	// nil Id. mapFields performs the nil checks, so map first and log the ID
	// from the populated model afterwards.
	err = mapFields(scheduleResp, &model)
	if err != nil {
		core.LogAndAddError(ctx, &resp.Diagnostics, "Error creating server update schedule", fmt.Sprintf("Processing API payload: %v", err))
		return
	}
	ctx = tflog.SetField(ctx, "update_schedule_id", model.UpdateScheduleId.ValueInt64())
	diags = resp.State.Set(ctx, model)
	resp.Diagnostics.Append(diags...)
	if resp.Diagnostics.HasError() {
		return
	}
	tflog.Info(ctx, "Server update schedule created.")
}
// Read refreshes the Terraform state with the latest data.
func (r *scheduleResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) { // nolint:gocritic // function signature required by Terraform
	var model Model
	resp.Diagnostics.Append(req.State.Get(ctx, &model)...)
	if resp.Diagnostics.HasError() {
		return
	}
	projectId := model.ProjectId.ValueString()
	serverId := model.ServerId.ValueString()
	scheduleId := model.UpdateScheduleId.ValueInt64()
	ctx = tflog.SetField(ctx, "project_id", projectId)
	ctx = tflog.SetField(ctx, "server_id", serverId)
	ctx = tflog.SetField(ctx, "update_schedule_id", scheduleId)

	schedule, err := r.client.GetUpdateSchedule(ctx, projectId, serverId, strconv.FormatInt(scheduleId, 10)).Execute()
	if err != nil {
		oapiErr, ok := err.(*oapierror.GenericOpenAPIError) //nolint:errorlint //complaining that error.As should be used to catch wrapped errors, but this error should not be wrapped
		if ok && oapiErr.StatusCode == http.StatusNotFound {
			// Gone upstream: drop it from state so Terraform plans a re-create.
			resp.State.RemoveResource(ctx)
			return
		}
		core.LogAndAddError(ctx, &resp.Diagnostics, "Error reading update schedule", fmt.Sprintf("Calling API: %v", err))
		return
	}
	// Map response body to schema
	if err := mapFields(schedule, &model); err != nil {
		core.LogAndAddError(ctx, &resp.Diagnostics, "Error reading update schedule", fmt.Sprintf("Processing API payload: %v", err))
		return
	}
	// Set refreshed state
	resp.Diagnostics.Append(resp.State.Set(ctx, model)...)
	if resp.Diagnostics.HasError() {
		return
	}
	tflog.Info(ctx, "Server update schedule read.")
}
// Update updates the resource and sets the updated Terraform state on success.
func (r *scheduleResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) { // nolint:gocritic // function signature required by Terraform
	var model Model
	resp.Diagnostics.Append(req.Plan.Get(ctx, &model)...)
	if resp.Diagnostics.HasError() {
		return
	}
	projectId := model.ProjectId.ValueString()
	serverId := model.ServerId.ValueString()
	scheduleId := model.UpdateScheduleId.ValueInt64()
	ctx = tflog.SetField(ctx, "project_id", projectId)
	ctx = tflog.SetField(ctx, "server_id", serverId)
	ctx = tflog.SetField(ctx, "update_schedule_id", scheduleId)

	// Build the update payload from the planned model.
	payload, err := toUpdatePayload(&model)
	if err != nil {
		core.LogAndAddError(ctx, &resp.Diagnostics, "Error updating server update schedule", fmt.Sprintf("Creating API payload: %v", err))
		return
	}
	schedule, err := r.client.UpdateUpdateSchedule(ctx, projectId, serverId, strconv.FormatInt(scheduleId, 10)).UpdateUpdateSchedulePayload(*payload).Execute()
	if err != nil {
		core.LogAndAddError(ctx, &resp.Diagnostics, "Error updating server update schedule", fmt.Sprintf("Calling API: %v", err))
		return
	}
	// Reflect the API response back into state.
	if err := mapFields(schedule, &model); err != nil {
		core.LogAndAddError(ctx, &resp.Diagnostics, "Error updating server update schedule", fmt.Sprintf("Processing API payload: %v", err))
		return
	}
	resp.Diagnostics.Append(resp.State.Set(ctx, model)...)
	if resp.Diagnostics.HasError() {
		return
	}
	tflog.Info(ctx, "Server update schedule updated.")
}
// Delete deletes the resource and removes the Terraform state on success.
func (r *scheduleResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) { // nolint:gocritic // function signature required by Terraform
	var model Model
	resp.Diagnostics.Append(req.State.Get(ctx, &model)...)
	if resp.Diagnostics.HasError() {
		return
	}
	projectId := model.ProjectId.ValueString()
	serverId := model.ServerId.ValueString()
	scheduleId := model.UpdateScheduleId.ValueInt64()
	ctx = tflog.SetField(ctx, "project_id", projectId)
	ctx = tflog.SetField(ctx, "server_id", serverId)
	ctx = tflog.SetField(ctx, "update_schedule_id", scheduleId)

	if err := r.client.DeleteUpdateSchedule(ctx, projectId, serverId, strconv.FormatInt(scheduleId, 10)).Execute(); err != nil {
		core.LogAndAddError(ctx, &resp.Diagnostics, "Error deleting server update schedule", fmt.Sprintf("Calling API: %v", err))
		return
	}
	tflog.Info(ctx, "Server update schedule deleted.")
}
// ImportState imports a resource into the Terraform state on success.
// The expected format of the resource import identifier is: // project_id,server_id,schedule_id
func (r *scheduleResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
	parts := strings.Split(req.ID, core.Separator)
	wellFormed := len(parts) == 3 && parts[0] != "" && parts[1] != "" && parts[2] != ""
	if !wellFormed {
		core.LogAndAddError(ctx, &resp.Diagnostics,
			"Error importing server update schedule",
			fmt.Sprintf("Expected import identifier with format [project_id],[server_id],[update_schedule_id], got %q", req.ID),
		)
		return
	}
	// The schedule ID is numeric in the API; reject non-integer input early.
	scheduleId, err := strconv.ParseInt(parts[2], 10, 64)
	if err != nil {
		core.LogAndAddError(ctx, &resp.Diagnostics,
			"Error importing server update schedule",
			fmt.Sprintf("Expected update_schedule_id to be int64, got %q", parts[2]),
		)
		return
	}
	resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), parts[0])...)
	resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("server_id"), parts[1])...)
	resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("update_schedule_id"), scheduleId)...)
	tflog.Info(ctx, "Server update schedule state imported.")
}
// mapFields copies an API UpdateSchedule response into the Terraform model.
// ProjectId and ServerId must already be set on the model; the composite ID
// is rebuilt from them plus the schedule ID.
func mapFields(schedule *serverupdate.UpdateSchedule, model *Model) error {
	switch {
	case schedule == nil:
		return fmt.Errorf("response input is nil")
	case model == nil:
		return fmt.Errorf("model input is nil")
	case schedule.Id == nil:
		return fmt.Errorf("response id is nil")
	}
	model.UpdateScheduleId = types.Int64PointerValue(schedule.Id)
	compositeId := strings.Join([]string{
		model.ProjectId.ValueString(),
		model.ServerId.ValueString(),
		strconv.FormatInt(model.UpdateScheduleId.ValueInt64(), 10),
	}, core.Separator)
	model.ID = types.StringValue(compositeId)
	model.Name = types.StringPointerValue(schedule.Name)
	model.Rrule = types.StringPointerValue(schedule.Rrule)
	model.Enabled = types.BoolPointerValue(schedule.Enabled)
	model.MaintenanceWindow = types.Int64PointerValue(schedule.MaintenanceWindow)
	return nil
}
// enableUpdatesService activates the server update service for the server.
// If already enabled, just continues: the API's "already active" error is
// treated as success.
func enableUpdatesService(ctx context.Context, model *Model, client *serverupdate.APIClient) error {
	projectId := model.ProjectId.ValueString()
	serverId := model.ServerId.ValueString()
	tflog.Debug(ctx, "Enabling server update service")
	payload := serverupdate.EnableServicePayload{}
	if err := client.EnableService(ctx, projectId, serverId).EnableServicePayload(payload).Execute(); err != nil {
		// Double activation is reported as an error by the API; swallow it.
		if strings.Contains(err.Error(), "Tried to activate already active service") {
			tflog.Debug(ctx, "Service for server update already enabled")
			return nil
		}
		return fmt.Errorf("enable server update service: %w", err)
	}
	tflog.Info(ctx, "Enabled server update service")
	return nil
}
// toCreatePayload converts the Terraform model into the API create payload.
// Null/unknown model values become nil pointers (omitted fields).
func toCreatePayload(model *Model) (*serverupdate.CreateUpdateSchedulePayload, error) {
	if model == nil {
		return nil, fmt.Errorf("nil model")
	}
	payload := serverupdate.CreateUpdateSchedulePayload{
		Name:              conversion.StringValueToPointer(model.Name),
		Rrule:             conversion.StringValueToPointer(model.Rrule),
		Enabled:           conversion.BoolValueToPointer(model.Enabled),
		MaintenanceWindow: conversion.Int64ValueToPointer(model.MaintenanceWindow),
	}
	return &payload, nil
}
// toUpdatePayload converts the Terraform model into the API update payload.
// Null/unknown model values become nil pointers (omitted fields).
func toUpdatePayload(model *Model) (*serverupdate.UpdateUpdateSchedulePayload, error) {
	if model == nil {
		return nil, fmt.Errorf("nil model")
	}
	payload := serverupdate.UpdateUpdateSchedulePayload{
		Name:              conversion.StringValueToPointer(model.Name),
		Rrule:             conversion.StringValueToPointer(model.Rrule),
		Enabled:           conversion.BoolValueToPointer(model.Enabled),
		MaintenanceWindow: conversion.Int64ValueToPointer(model.MaintenanceWindow),
	}
	return &payload, nil
}

View file

@ -0,0 +1,221 @@
package schedule
import (
"testing"
"github.com/google/go-cmp/cmp"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/stackitcloud/stackit-sdk-go/core/utils"
sdk "github.com/stackitcloud/stackit-sdk-go/services/serverupdate"
)
// TestMapFields verifies mapFields both on success (fields copied, composite
// ID built) and on the documented failure modes (nil response, missing ID).
func TestMapFields(t *testing.T) {
	cases := []struct {
		description string
		input       *sdk.UpdateSchedule
		expected    Model
		isValid     bool
	}{
		{
			description: "default_values",
			input: &sdk.UpdateSchedule{
				Id: utils.Ptr(int64(5)),
			},
			expected: Model{
				ID:               types.StringValue("project_uid,server_uid,5"),
				ProjectId:        types.StringValue("project_uid"),
				ServerId:         types.StringValue("server_uid"),
				UpdateScheduleId: types.Int64Value(5),
			},
			isValid: true,
		},
		{
			description: "simple_values",
			input: &sdk.UpdateSchedule{
				Id:                utils.Ptr(int64(5)),
				Enabled:           utils.Ptr(true),
				Name:              utils.Ptr("update_schedule_name_1"),
				Rrule:             utils.Ptr("DTSTART;TZID=Europe/Sofia:20200803T023000 RRULE:FREQ=DAILY;INTERVAL=1"),
				MaintenanceWindow: utils.Ptr(int64(1)),
			},
			expected: Model{
				ServerId:          types.StringValue("server_uid"),
				ProjectId:         types.StringValue("project_uid"),
				UpdateScheduleId:  types.Int64Value(5),
				ID:                types.StringValue("project_uid,server_uid,5"),
				Name:              types.StringValue("update_schedule_name_1"),
				Rrule:             types.StringValue("DTSTART;TZID=Europe/Sofia:20200803T023000 RRULE:FREQ=DAILY;INTERVAL=1"),
				Enabled:           types.BoolValue(true),
				MaintenanceWindow: types.Int64Value(1),
			},
			isValid: true,
		},
		{
			description: "nil_response",
			input:       nil,
			expected:    Model{},
			isValid:     false,
		},
		{
			description: "no_resource_id",
			input:       &sdk.UpdateSchedule{},
			expected:    Model{},
			isValid:     false,
		},
	}
	for _, tc := range cases {
		t.Run(tc.description, func(t *testing.T) {
			// Seed the identifiers that mapFields reads but never writes.
			state := &Model{
				ProjectId: tc.expected.ProjectId,
				ServerId:  tc.expected.ServerId,
			}
			err := mapFields(tc.input, state)
			switch {
			case err == nil && !tc.isValid:
				t.Fatalf("Should have failed")
			case err != nil && tc.isValid:
				t.Fatalf("Should not have failed: %v", err)
			}
			if !tc.isValid {
				return
			}
			if diff := cmp.Diff(state, &tc.expected); diff != "" {
				t.Fatalf("Data does not match: %s", diff)
			}
		})
	}
}
// TestToCreatePayload checks the model-to-create-payload conversion,
// including empty models, fully-populated models, and the nil-model error.
func TestToCreatePayload(t *testing.T) {
	cases := []struct {
		description string
		input       *Model
		expected    *sdk.CreateUpdateSchedulePayload
		isValid     bool
	}{
		{
			description: "default_values",
			input:       &Model{},
			expected:    &sdk.CreateUpdateSchedulePayload{},
			isValid:     true,
		},
		{
			description: "simple_values",
			input: &Model{
				Name:              types.StringValue("name"),
				Enabled:           types.BoolValue(true),
				Rrule:             types.StringValue("DTSTART;TZID=Europe/Sofia:20200803T023000 RRULE:FREQ=DAILY;INTERVAL=1"),
				MaintenanceWindow: types.Int64Value(1),
			},
			expected: &sdk.CreateUpdateSchedulePayload{
				Name:              utils.Ptr("name"),
				Enabled:           utils.Ptr(true),
				Rrule:             utils.Ptr("DTSTART;TZID=Europe/Sofia:20200803T023000 RRULE:FREQ=DAILY;INTERVAL=1"),
				MaintenanceWindow: utils.Ptr(int64(1)),
			},
			isValid: true,
		},
		{
			description: "null_fields_and_int_conversions",
			input: &Model{
				Name:  types.StringValue(""),
				Rrule: types.StringValue(""),
			},
			expected: &sdk.CreateUpdateSchedulePayload{
				Name:  utils.Ptr(""),
				Rrule: utils.Ptr(""),
			},
			isValid: true,
		},
		{
			description: "nil_model",
			input:       nil,
			expected:    nil,
			isValid:     false,
		},
	}
	for _, tc := range cases {
		t.Run(tc.description, func(t *testing.T) {
			got, err := toCreatePayload(tc.input)
			switch {
			case err == nil && !tc.isValid:
				t.Fatalf("Should have failed")
			case err != nil && tc.isValid:
				t.Fatalf("Should not have failed: %v", err)
			}
			if !tc.isValid {
				return
			}
			if diff := cmp.Diff(got, tc.expected); diff != "" {
				t.Fatalf("Data does not match: %s", diff)
			}
		})
	}
}
// TestToUpdatePayload checks the model-to-update-payload conversion,
// mirroring the create-payload cases.
func TestToUpdatePayload(t *testing.T) {
	cases := []struct {
		description string
		input       *Model
		expected    *sdk.UpdateUpdateSchedulePayload
		isValid     bool
	}{
		{
			description: "default_values",
			input:       &Model{},
			expected:    &sdk.UpdateUpdateSchedulePayload{},
			isValid:     true,
		},
		{
			description: "simple_values",
			input: &Model{
				Name:              types.StringValue("name"),
				Enabled:           types.BoolValue(true),
				Rrule:             types.StringValue("DTSTART;TZID=Europe/Sofia:20200803T023000 RRULE:FREQ=DAILY;INTERVAL=1"),
				MaintenanceWindow: types.Int64Value(1),
			},
			expected: &sdk.UpdateUpdateSchedulePayload{
				Name:              utils.Ptr("name"),
				Enabled:           utils.Ptr(true),
				Rrule:             utils.Ptr("DTSTART;TZID=Europe/Sofia:20200803T023000 RRULE:FREQ=DAILY;INTERVAL=1"),
				MaintenanceWindow: utils.Ptr(int64(1)),
			},
			isValid: true,
		},
		{
			description: "null_fields_and_int_conversions",
			input: &Model{
				Name:  types.StringValue(""),
				Rrule: types.StringValue(""),
			},
			expected: &sdk.UpdateUpdateSchedulePayload{
				Name:  utils.Ptr(""),
				Rrule: utils.Ptr(""),
			},
			isValid: true,
		},
		{
			description: "nil_model",
			input:       nil,
			expected:    nil,
			isValid:     false,
		},
	}
	for _, tc := range cases {
		t.Run(tc.description, func(t *testing.T) {
			got, err := toUpdatePayload(tc.input)
			switch {
			case err == nil && !tc.isValid:
				t.Fatalf("Should have failed")
			case err != nil && tc.isValid:
				t.Fatalf("Should not have failed: %v", err)
			}
			if !tc.isValid {
				return
			}
			if diff := cmp.Diff(got, tc.expected); diff != "" {
				t.Fatalf("Data does not match: %s", diff)
			}
		})
	}
}

View file

@ -0,0 +1,182 @@
package schedule
import (
"context"
"fmt"
"net/http"
"strconv"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-log/tflog"
"github.com/stackitcloud/terraform-provider-stackit/stackit/internal/core"
"github.com/stackitcloud/terraform-provider-stackit/stackit/internal/features"
"github.com/stackitcloud/terraform-provider-stackit/stackit/internal/validate"
"github.com/stackitcloud/stackit-sdk-go/core/config"
"github.com/stackitcloud/stackit-sdk-go/core/oapierror"
"github.com/stackitcloud/stackit-sdk-go/services/serverupdate"
)
// scheduleDataSourceBetaCheckDone is used to prevent multiple checks for beta resources.
// This is a workaround for the lack of a global state in the provider and
// needs to exist because the Configure method is called twice.
// NOTE(review): unsynchronized package-level flag — assumed safe because the
// framework calls Configure sequentially; confirm.
var scheduleDataSourceBetaCheckDone bool

// Ensure the implementation satisfies the expected interfaces.
var (
	_ datasource.DataSource = &scheduleDataSource{}
)
// NewScheduleDataSource is a helper function to simplify the provider implementation.
func NewScheduleDataSource() datasource.DataSource {
	var d scheduleDataSource
	return &d
}
// scheduleDataSource is the data source implementation.
type scheduleDataSource struct {
	// client is the server-update API client; populated in Configure.
	client *serverupdate.APIClient
}
// Metadata returns the data source type name.
func (r *scheduleDataSource) Metadata(_ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
	const suffix = "_server_update_schedule"
	resp.TypeName = req.ProviderTypeName + suffix
}
// Configure adds the provider configured client to the data source.
func (r *scheduleDataSource) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) {
	// Provider not configured yet; Configure will be called again later.
	if req.ProviderData == nil {
		return
	}
	pd, ok := req.ProviderData.(core.ProviderData)
	if !ok {
		core.LogAndAddError(ctx, &resp.Diagnostics, "Error configuring API client", fmt.Sprintf("Expected configure type stackit.ProviderData, got %T", req.ProviderData))
		return
	}
	// Run the beta-feature gate only once per provider lifetime.
	if !scheduleDataSourceBetaCheckDone {
		features.CheckBetaResourcesEnabled(ctx, &pd, &resp.Diagnostics, "stackit_server_update_schedule", "data source")
		if resp.Diagnostics.HasError() {
			return
		}
		scheduleDataSourceBetaCheckDone = true
	}
	// A custom endpoint overrides the region-derived default endpoint.
	var (
		client *serverupdate.APIClient
		err    error
	)
	switch {
	case pd.ServerUpdateCustomEndpoint != "":
		ctx = tflog.SetField(ctx, "server_update_custom_endpoint", pd.ServerUpdateCustomEndpoint)
		client, err = serverupdate.NewAPIClient(
			config.WithCustomAuth(pd.RoundTripper),
			config.WithEndpoint(pd.ServerUpdateCustomEndpoint),
		)
	default:
		client, err = serverupdate.NewAPIClient(
			config.WithCustomAuth(pd.RoundTripper),
			config.WithRegion(pd.Region),
		)
	}
	if err != nil {
		core.LogAndAddError(ctx, &resp.Diagnostics, "Error configuring API client", fmt.Sprintf("Configuring client: %v. This is an error related to the provider configuration, not to the data source configuration", err))
		return
	}
	r.client = client
	tflog.Info(ctx, "Server update client configured")
}
// Schema defines the schema for the data source.
// Inputs are project_id, server_id and update_schedule_id; all other
// attributes are computed from the API response.
func (r *scheduleDataSource) Schema(_ context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) {
	resp.Schema = schema.Schema{
		Description:         "Server update schedule datasource schema. Must have a `region` specified in the provider configuration.",
		MarkdownDescription: features.AddBetaDescription("Server update schedule datasource schema. Must have a `region` specified in the provider configuration."),
		Attributes: map[string]schema.Attribute{
			// Synthetic ID assembled in mapFields from project_id, server_id
			// and update_schedule_id.
			"id": schema.StringAttribute{
				Description: "Terraform's internal resource identifier. It is structured as \"`project_id`,`server_id`,`update_schedule_id`\".",
				Computed:    true,
			},
			"name": schema.StringAttribute{
				Description: "The schedule name.",
				Computed:    true,
			},
			// Numeric schedule ID; selects which schedule to read.
			"update_schedule_id": schema.Int64Attribute{
				Description: "Update schedule ID.",
				Required:    true,
			},
			"project_id": schema.StringAttribute{
				Description: "STACKIT Project ID to which the server is associated.",
				Required:    true,
				Validators: []validator.String{
					validate.UUID(),
					validate.NoSeparator(),
				},
			},
			"server_id": schema.StringAttribute{
				Description: "Server ID for the update schedule.",
				Required:    true,
				Validators: []validator.String{
					validate.UUID(),
					validate.NoSeparator(),
				},
			},
			"rrule": schema.StringAttribute{
				Description: "Update schedule described in `rrule` (recurrence rule) format.",
				Computed:    true,
			},
			"enabled": schema.BoolAttribute{
				Description: "Is the update schedule enabled or disabled.",
				Computed:    true,
			},
			"maintenance_window": schema.Int64Attribute{
				Description: "Maintenance window [1..24].",
				Computed:    true,
			},
		},
	}
}
// Read refreshes the Terraform state with the latest data.
//
// A 404 from the API is reported as a read error: unlike a managed resource,
// a data source has no persisted state to remove, so not-found cannot be
// resolved by dropping state.
func (r *scheduleDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { // nolint:gocritic // function signature required by Terraform
	var model Model
	diags := req.Config.Get(ctx, &model)
	resp.Diagnostics.Append(diags...)
	if resp.Diagnostics.HasError() {
		return
	}
	projectId := model.ProjectId.ValueString()
	serverId := model.ServerId.ValueString()
	updateScheduleId := model.UpdateScheduleId.ValueInt64()
	ctx = tflog.SetField(ctx, "project_id", projectId)
	ctx = tflog.SetField(ctx, "server_id", serverId)
	ctx = tflog.SetField(ctx, "update_schedule_id", updateScheduleId)
	scheduleResp, err := r.client.GetUpdateSchedule(ctx, projectId, serverId, strconv.FormatInt(updateScheduleId, 10)).Execute()
	if err != nil {
		oapiErr, ok := err.(*oapierror.GenericOpenAPIError) //nolint:errorlint //complaining that error.As should be used to catch wrapped errors, but this error should not be wrapped
		if ok && oapiErr.StatusCode == http.StatusNotFound {
			// BUG FIX: the original called resp.State.RemoveResource(ctx) here
			// and then fell through (missing return) to the generic error
			// below. RemoveResource is meaningless for a data source; report
			// not-found explicitly and stop.
			core.LogAndAddError(ctx, &resp.Diagnostics, "Error reading server update schedule", fmt.Sprintf("Update schedule not found: %v", err))
			return
		}
		core.LogAndAddError(ctx, &resp.Diagnostics, "Error reading server update schedule", fmt.Sprintf("Calling API: %v", err))
		return
	}
	// Map response body to schema
	err = mapFields(scheduleResp, &model)
	if err != nil {
		core.LogAndAddError(ctx, &resp.Diagnostics, "Error reading server update schedule", fmt.Sprintf("Processing API payload: %v", err))
		return
	}
	// Set refreshed state
	diags = resp.State.Set(ctx, model)
	resp.Diagnostics.Append(diags...)
	if resp.Diagnostics.HasError() {
		return
	}
	tflog.Info(ctx, "Server update schedule read")
}

View file

@ -0,0 +1,233 @@
package schedule
import (
"context"
"fmt"
"net/http"
"strings"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-log/tflog"
"github.com/stackitcloud/terraform-provider-stackit/stackit/internal/core"
"github.com/stackitcloud/terraform-provider-stackit/stackit/internal/features"
"github.com/stackitcloud/terraform-provider-stackit/stackit/internal/validate"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
"github.com/stackitcloud/stackit-sdk-go/core/config"
"github.com/stackitcloud/stackit-sdk-go/core/oapierror"
"github.com/stackitcloud/stackit-sdk-go/services/serverupdate"
)
// schedulesDataSourceBetaCheckDone is used to prevent multiple checks for beta resources.
// This is a workaround for the lack of a global state in the provider and
// needs to exist because the Configure method is called twice.
// (Comment fixed: it previously named the singular data source's flag.)
var schedulesDataSourceBetaCheckDone bool

// Ensure the implementation satisfies the expected interfaces.
var (
	_ datasource.DataSource = &schedulesDataSource{}
)
// NewSchedulesDataSource is a helper function to simplify the provider implementation.
func NewSchedulesDataSource() datasource.DataSource {
	var d schedulesDataSource
	return &d
}
// schedulesDataSource is the data source implementation.
type schedulesDataSource struct {
	// client is the server-update API client; populated in Configure.
	client *serverupdate.APIClient
}
// Metadata returns the data source type name.
func (r *schedulesDataSource) Metadata(_ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
	const suffix = "_server_update_schedules"
	resp.TypeName = req.ProviderTypeName + suffix
}
// Configure adds the provider configured client to the data source.
func (r *schedulesDataSource) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) {
	// Provider not configured yet; Configure will be called again later.
	if req.ProviderData == nil {
		return
	}
	pd, ok := req.ProviderData.(core.ProviderData)
	if !ok {
		core.LogAndAddError(ctx, &resp.Diagnostics, "Error configuring API client", fmt.Sprintf("Expected configure type stackit.ProviderData, got %T", req.ProviderData))
		return
	}
	// Run the beta-feature gate only once per provider lifetime.
	if !schedulesDataSourceBetaCheckDone {
		features.CheckBetaResourcesEnabled(ctx, &pd, &resp.Diagnostics, "stackit_server_update_schedules", "data source")
		if resp.Diagnostics.HasError() {
			return
		}
		schedulesDataSourceBetaCheckDone = true
	}
	// A custom endpoint overrides the region-derived default endpoint.
	var (
		client *serverupdate.APIClient
		err    error
	)
	switch {
	case pd.ServerUpdateCustomEndpoint != "":
		ctx = tflog.SetField(ctx, "server_update_custom_endpoint", pd.ServerUpdateCustomEndpoint)
		client, err = serverupdate.NewAPIClient(
			config.WithCustomAuth(pd.RoundTripper),
			config.WithEndpoint(pd.ServerUpdateCustomEndpoint),
		)
	default:
		client, err = serverupdate.NewAPIClient(
			config.WithCustomAuth(pd.RoundTripper),
			config.WithRegion(pd.Region),
		)
	}
	if err != nil {
		core.LogAndAddError(ctx, &resp.Diagnostics, "Error configuring API client", fmt.Sprintf("Configuring client: %v. This is an error related to the provider configuration, not to the data source configuration", err))
		return
	}
	r.client = client
	tflog.Info(ctx, "Server update client configured")
}
// Schema defines the schema for the data source.
// Inputs are project_id and server_id; "items" lists all update schedules of
// the server and is computed from the API response.
func (r *schedulesDataSource) Schema(_ context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) {
	resp.Schema = schema.Schema{
		Description:         "Server update schedules datasource schema. Must have a `region` specified in the provider configuration.",
		MarkdownDescription: features.AddBetaDescription("Server update schedules datasource schema. Must have a `region` specified in the provider configuration."),
		Attributes: map[string]schema.Attribute{
			// Synthetic ID: "<project_id>,<server_id>".
			"id": schema.StringAttribute{
				Description: "Terraform's internal data source identifier. It is structured as \"`project_id`,`server_id`\".",
				Computed:    true,
			},
			"project_id": schema.StringAttribute{
				Description: "STACKIT Project ID (UUID) to which the server is associated.",
				Required:    true,
				Validators: []validator.String{
					validate.UUID(),
					validate.NoSeparator(),
				},
			},
			"server_id": schema.StringAttribute{
				Description: "Server ID (UUID) to which the update schedule is associated.",
				Required:    true,
				Validators: []validator.String{
					validate.UUID(),
					validate.NoSeparator(),
				},
			},
			// One entry per update schedule configured on the server.
			"items": schema.ListNestedAttribute{
				Computed: true,
				NestedObject: schema.NestedAttributeObject{
					Attributes: map[string]schema.Attribute{
						"update_schedule_id": schema.Int64Attribute{
							Computed: true,
						},
						"name": schema.StringAttribute{
							Description: "The update schedule name.",
							Computed:    true,
						},
						"rrule": schema.StringAttribute{
							Description: "Update schedule described in `rrule` (recurrence rule) format.",
							Computed:    true,
						},
						"enabled": schema.BoolAttribute{
							Description: "Is the update schedule enabled or disabled.",
							Computed:    true,
						},
						"maintenance_window": schema.Int64Attribute{
							Description: "Maintenance window [1..24].",
							Computed:    true,
						},
					},
				},
			},
		},
	}
}
// schedulesDataSourceModel maps the data source schema data.
type schedulesDataSourceModel struct {
	// ID is Terraform's internal identifier, "<project_id>,<server_id>".
	ID types.String `tfsdk:"id"`
	// ProjectId is the STACKIT project the server belongs to (UUID).
	ProjectId types.String `tfsdk:"project_id"`
	// ServerId is the server whose update schedules are listed (UUID).
	ServerId types.String `tfsdk:"server_id"`
	// Items holds one entry per update schedule returned by the API.
	Items []schedulesDatasourceItemModel `tfsdk:"items"`
}
// schedulesDatasourceItemModel maps schedule schema data.
type schedulesDatasourceItemModel struct {
	// UpdateScheduleId is the numeric ID assigned by the API.
	UpdateScheduleId types.Int64 `tfsdk:"update_schedule_id"`
	// Name is the human-readable schedule name.
	Name types.String `tfsdk:"name"`
	// Rrule is the recurrence rule (RFC 5545 rrule format) of the schedule.
	Rrule types.String `tfsdk:"rrule"`
	// Enabled reports whether the schedule is active.
	Enabled types.Bool `tfsdk:"enabled"`
	// MaintenanceWindow is the window hour in the range [1..24].
	MaintenanceWindow types.Int64 `tfsdk:"maintenance_window"`
}
// Read refreshes the Terraform state with the latest data.
func (r *schedulesDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { // nolint:gocritic // function signature required by Terraform
	var model schedulesDataSourceModel
	resp.Diagnostics.Append(req.Config.Get(ctx, &model)...)
	if resp.Diagnostics.HasError() {
		return
	}

	projectId := model.ProjectId.ValueString()
	serverId := model.ServerId.ValueString()
	ctx = tflog.SetField(ctx, "project_id", projectId)
	ctx = tflog.SetField(ctx, "server_id", serverId)

	schedules, err := r.client.ListUpdateSchedules(ctx, projectId, serverId).Execute()
	if err != nil {
		// On 404 the server (or its schedules) no longer exists, so it is
		// removed from state in addition to surfacing the diagnostic.
		oapiErr, ok := err.(*oapierror.GenericOpenAPIError) //nolint:errorlint //complaining that error.As should be used to catch wrapped errors, but this error should not be wrapped
		if ok && oapiErr.StatusCode == http.StatusNotFound {
			resp.State.RemoveResource(ctx)
		}
		core.LogAndAddError(ctx, &resp.Diagnostics, "Error reading server update schedules", fmt.Sprintf("Calling API: %v", err))
		return
	}

	// Map response body to schema
	if err := mapSchedulesDatasourceFields(ctx, schedules, &model); err != nil {
		core.LogAndAddError(ctx, &resp.Diagnostics, "Error reading server update schedules", fmt.Sprintf("Processing API payload: %v", err))
		return
	}

	// Set refreshed state
	resp.Diagnostics.Append(resp.State.Set(ctx, model)...)
	if resp.Diagnostics.HasError() {
		return
	}
	tflog.Info(ctx, "Server update schedules read")
}
// mapSchedulesDatasourceFields maps an API ListUpdateSchedules response onto
// the Terraform data source model. The composite ID is built by joining
// project_id and server_id with the core separator; each response item is
// appended to model.Items.
//
// It returns an error when either input, or the response item list, is nil,
// so the caller can surface a diagnostic instead of panicking.
func mapSchedulesDatasourceFields(ctx context.Context, schedules *serverupdate.GetUpdateSchedulesResponse, model *schedulesDataSourceModel) error {
	if schedules == nil {
		return fmt.Errorf("response input is nil")
	}
	if model == nil {
		return fmt.Errorf("model input is nil")
	}
	// Guard the list itself: dereferencing a nil Items pointer below would
	// panic the provider on a malformed/empty API payload.
	if schedules.Items == nil {
		return fmt.Errorf("response items are nil")
	}
	tflog.Debug(ctx, "response", map[string]any{"schedules": schedules})

	projectId := model.ProjectId.ValueString()
	serverId := model.ServerId.ValueString()
	idParts := []string{projectId, serverId}
	model.ID = types.StringValue(
		strings.Join(idParts, core.Separator),
	)
	for _, schedule := range *schedules.Items {
		// Use the *PointerValue constructors so a nil field coming from the
		// API maps to a null attribute instead of a nil-pointer dereference;
		// non-nil pointers produce the same values as before.
		scheduleState := schedulesDatasourceItemModel{
			UpdateScheduleId:  types.Int64PointerValue(schedule.Id),
			Name:              types.StringPointerValue(schedule.Name),
			Rrule:             types.StringPointerValue(schedule.Rrule),
			Enabled:           types.BoolPointerValue(schedule.Enabled),
			MaintenanceWindow: types.Int64PointerValue(schedule.MaintenanceWindow),
		}
		model.Items = append(model.Items, scheduleState)
	}
	return nil
}

View file

@ -0,0 +1,91 @@
package schedule
import (
"context"
"testing"
"github.com/google/go-cmp/cmp"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/stackitcloud/stackit-sdk-go/core/utils"
sdk "github.com/stackitcloud/stackit-sdk-go/services/serverupdate"
)
// TestMapSchedulesDataSourceFields verifies that API responses are mapped
// onto the data source model correctly and that a nil response is rejected.
func TestMapSchedulesDataSourceFields(t *testing.T) {
	testCases := []struct {
		description string
		input       *sdk.GetUpdateSchedulesResponse
		expected    schedulesDataSourceModel
		isValid     bool
	}{
		{
			"empty response",
			&sdk.GetUpdateSchedulesResponse{
				Items: &[]sdk.UpdateSchedule{},
			},
			schedulesDataSourceModel{
				ID:        types.StringValue("project_uid,server_uid"),
				ProjectId: types.StringValue("project_uid"),
				ServerId:  types.StringValue("server_uid"),
				Items:     nil,
			},
			true,
		},
		{
			"simple_values",
			&sdk.GetUpdateSchedulesResponse{
				Items: &[]sdk.UpdateSchedule{
					{
						Id:                utils.Ptr(int64(5)),
						Enabled:           utils.Ptr(true),
						Name:              utils.Ptr("update_schedule_name_1"),
						Rrule:             utils.Ptr("DTSTART;TZID=Europe/Sofia:20200803T023000 RRULE:FREQ=DAILY;INTERVAL=1"),
						MaintenanceWindow: utils.Ptr(int64(1)),
					},
				},
			},
			schedulesDataSourceModel{
				ID:        types.StringValue("project_uid,server_uid"),
				ServerId:  types.StringValue("server_uid"),
				ProjectId: types.StringValue("project_uid"),
				Items: []schedulesDatasourceItemModel{
					{
						UpdateScheduleId:  types.Int64Value(5),
						Name:              types.StringValue("update_schedule_name_1"),
						Rrule:             types.StringValue("DTSTART;TZID=Europe/Sofia:20200803T023000 RRULE:FREQ=DAILY;INTERVAL=1"),
						Enabled:           types.BoolValue(true),
						MaintenanceWindow: types.Int64Value(1),
					},
				},
			},
			true,
		},
		{
			"nil_response",
			nil,
			schedulesDataSourceModel{},
			false,
		},
	}
	for _, tc := range testCases {
		t.Run(tc.description, func(t *testing.T) {
			// Seed only the identifying fields; the mapper fills in the rest.
			model := &schedulesDataSourceModel{
				ProjectId: tc.expected.ProjectId,
				ServerId:  tc.expected.ServerId,
			}
			err := mapSchedulesDatasourceFields(context.TODO(), tc.input, model)
			switch {
			case !tc.isValid && err == nil:
				t.Fatalf("Should have failed")
			case tc.isValid && err != nil:
				t.Fatalf("Should not have failed: %v", err)
			}
			if !tc.isValid {
				return
			}
			if diff := cmp.Diff(model, &tc.expected); diff != "" {
				t.Fatalf("Data does not match: %s", diff)
			}
		})
	}
}