Add regional support for serverbackup resource and data-source (#745)

* feat(serverbackup): add regional support

* fix: corrections for review findings
This commit is contained in:
Rüdiger Schmitz 2025-03-28 09:24:52 +01:00 committed by GitHub
parent 8b57c35712
commit a870b71d0a
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
11 changed files with 220 additions and 89 deletions

View file

@ -12,6 +12,7 @@ import (
"github.com/hashicorp/terraform-plugin-log/tflog"
"github.com/stackitcloud/terraform-provider-stackit/stackit/internal/core"
"github.com/stackitcloud/terraform-provider-stackit/stackit/internal/features"
"github.com/stackitcloud/terraform-provider-stackit/stackit/internal/utils"
"github.com/stackitcloud/terraform-provider-stackit/stackit/internal/validate"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
@ -37,7 +38,8 @@ func NewSchedulesDataSource() datasource.DataSource {
// schedulesDataSource is the data source implementation.
type schedulesDataSource struct {
client *serverbackup.APIClient
client *serverbackup.APIClient
providerData core.ProviderData
}
// Metadata returns the data source type name.
@ -52,14 +54,15 @@ func (r *schedulesDataSource) Configure(ctx context.Context, req datasource.Conf
return
}
providerData, ok := req.ProviderData.(core.ProviderData)
var ok bool
r.providerData, ok = req.ProviderData.(core.ProviderData)
if !ok {
core.LogAndAddError(ctx, &resp.Diagnostics, "Error configuring API client", fmt.Sprintf("Expected configure type stackit.ProviderData, got %T", req.ProviderData))
return
}
if !schedulesDataSourceBetaCheckDone {
features.CheckBetaResourcesEnabled(ctx, &providerData, &resp.Diagnostics, "stackit_server_backup_schedules", "data source")
features.CheckBetaResourcesEnabled(ctx, &r.providerData, &resp.Diagnostics, "stackit_server_backup_schedules", "data source")
if resp.Diagnostics.HasError() {
return
}
@ -68,16 +71,15 @@ func (r *schedulesDataSource) Configure(ctx context.Context, req datasource.Conf
var apiClient *serverbackup.APIClient
var err error
if providerData.ServerBackupCustomEndpoint != "" {
ctx = tflog.SetField(ctx, "server_backup_custom_endpoint", providerData.ServerBackupCustomEndpoint)
if r.providerData.ServerBackupCustomEndpoint != "" {
ctx = tflog.SetField(ctx, "server_backup_custom_endpoint", r.providerData.ServerBackupCustomEndpoint)
apiClient, err = serverbackup.NewAPIClient(
config.WithCustomAuth(providerData.RoundTripper),
config.WithEndpoint(providerData.ServerBackupCustomEndpoint),
config.WithCustomAuth(r.providerData.RoundTripper),
config.WithEndpoint(r.providerData.ServerBackupCustomEndpoint),
)
} else {
apiClient, err = serverbackup.NewAPIClient(
config.WithCustomAuth(providerData.RoundTripper),
config.WithRegion(providerData.GetRegion()),
config.WithCustomAuth(r.providerData.RoundTripper),
)
}
@ -154,6 +156,11 @@ func (r *schedulesDataSource) Schema(_ context.Context, _ datasource.SchemaReque
},
},
},
"region": schema.StringAttribute{
// the region cannot be found, so it has to be passed
Optional: true,
Description: "The resource region. If not defined, the provider region is used.",
},
},
}
}
@ -164,6 +171,7 @@ type schedulesDataSourceModel struct {
ProjectId types.String `tfsdk:"project_id"`
ServerId types.String `tfsdk:"server_id"`
Items []schedulesDatasourceItemModel `tfsdk:"items"`
Region types.String `tfsdk:"region"`
}
// schedulesDatasourceItemModel maps schedule schema data.
@ -185,10 +193,17 @@ func (r *schedulesDataSource) Read(ctx context.Context, req datasource.ReadReque
}
projectId := model.ProjectId.ValueString()
serverId := model.ServerId.ValueString()
var region string
if utils.IsUndefined(model.Region) {
region = r.providerData.GetRegion()
} else {
region = model.Region.ValueString()
}
ctx = tflog.SetField(ctx, "project_id", projectId)
ctx = tflog.SetField(ctx, "server_id", serverId)
ctx = tflog.SetField(ctx, "region", region)
schedules, err := r.client.ListBackupSchedules(ctx, projectId, serverId).Execute()
schedules, err := r.client.ListBackupSchedules(ctx, projectId, serverId, region).Execute()
if err != nil {
oapiErr, ok := err.(*oapierror.GenericOpenAPIError) //nolint:errorlint //complaining that error.As should be used to catch wrapped errors, but this error should not be wrapped
if ok && oapiErr.StatusCode == http.StatusNotFound {
@ -199,7 +214,7 @@ func (r *schedulesDataSource) Read(ctx context.Context, req datasource.ReadReque
}
// Map response body to schema
err = mapSchedulesDatasourceFields(ctx, schedules, &model)
err = mapSchedulesDatasourceFields(ctx, schedules, &model, region)
if err != nil {
core.LogAndAddError(ctx, &resp.Diagnostics, "Error reading server backup schedules", fmt.Sprintf("Processing API payload: %v", err))
return
@ -214,7 +229,7 @@ func (r *schedulesDataSource) Read(ctx context.Context, req datasource.ReadReque
tflog.Info(ctx, "Server backup schedules read")
}
func mapSchedulesDatasourceFields(ctx context.Context, schedules *serverbackup.GetBackupSchedulesResponse, model *schedulesDataSourceModel) error {
func mapSchedulesDatasourceFields(ctx context.Context, schedules *serverbackup.GetBackupSchedulesResponse, model *schedulesDataSourceModel, region string) error {
if schedules == nil {
return fmt.Errorf("response input is nil")
}
@ -226,10 +241,11 @@ func mapSchedulesDatasourceFields(ctx context.Context, schedules *serverbackup.G
projectId := model.ProjectId.ValueString()
serverId := model.ServerId.ValueString()
idParts := []string{projectId, serverId}
idParts := []string{projectId, region, serverId}
model.ID = types.StringValue(
strings.Join(idParts, core.Separator),
)
model.Region = types.StringValue(region)
for _, schedule := range *schedules.Items {
scheduleState := schedulesDatasourceItemModel{