diff --git a/cmd/cmd/build/build.go b/cmd/cmd/build/build.go
index df038609..3351d5da 100644
--- a/cmd/cmd/build/build.go
+++ b/cmd/cmd/build/build.go
@@ -254,10 +254,12 @@ func (b *Builder) Build() error {
}
type templateData struct {
- PackageName string
- NameCamel string
- NamePascal string
- NameSnake string
+ PackageName string
+ PackageNameCamel string
+ PackageNamePascal string
+ NameCamel string
+ NamePascal string
+ NameSnake string
}
func fileExists(path string) bool {
@@ -310,7 +312,7 @@ func createBoilerplate(rootFolder, folder string) error {
foundRes = fileExists(resGoFile)
if handleDS && !foundDS {
- slog.Info("Creating missing datasource.go", "service", svc.Name(), "resource", resourceName)
+ slog.Info(" creating missing datasource.go", "service", svc.Name(), "resource", resourceName)
if !ValidateSnakeCase(resourceName) {
return errors.New("resource name is invalid")
}
@@ -318,13 +320,15 @@ func createBoilerplate(rootFolder, folder string) error {
tplName := "data_source_scaffold.gotmpl"
err = writeTemplateToFile(
tplName,
- path.Join(rootFolder, "tools", "templates", tplName),
+ path.Join(rootFolder, "cmd", "cmd", "build", "templates", tplName),
path.Join(folder, svc.Name(), res.Name(), "datasource.go"),
&templateData{
- PackageName: svc.Name(),
- NameCamel: ToCamelCase(resourceName),
- NamePascal: ToPascalCase(resourceName),
- NameSnake: resourceName,
+ PackageName: svc.Name(),
+ PackageNameCamel: ToCamelCase(svc.Name()),
+ PackageNamePascal: ToPascalCase(svc.Name()),
+ NameCamel: ToCamelCase(resourceName),
+ NamePascal: ToPascalCase(resourceName),
+ NameSnake: resourceName,
},
)
if err != nil {
@@ -333,7 +337,7 @@ func createBoilerplate(rootFolder, folder string) error {
}
if handleRes && !foundRes {
- slog.Info("Creating missing resource.go", "service", svc.Name(), "resource", resourceName)
+ slog.Info(" creating missing resource.go", "service", svc.Name(), "resource", resourceName)
if !ValidateSnakeCase(resourceName) {
return errors.New("resource name is invalid")
}
@@ -341,18 +345,45 @@ func createBoilerplate(rootFolder, folder string) error {
tplName := "resource_scaffold.gotmpl"
err = writeTemplateToFile(
tplName,
- path.Join(rootFolder, "tools", "templates", tplName),
+ path.Join(rootFolder, "cmd", "cmd", "build", "templates", tplName),
path.Join(folder, svc.Name(), res.Name(), "resource.go"),
&templateData{
- PackageName: svc.Name(),
- NameCamel: ToCamelCase(resourceName),
- NamePascal: ToPascalCase(resourceName),
- NameSnake: resourceName,
+ PackageName: svc.Name(),
+ PackageNameCamel: ToCamelCase(svc.Name()),
+ PackageNamePascal: ToPascalCase(svc.Name()),
+ NameCamel: ToCamelCase(resourceName),
+ NamePascal: ToPascalCase(resourceName),
+ NameSnake: resourceName,
},
)
if err != nil {
return err
}
+
+ if !fileExists(path.Join(folder, svc.Name(), res.Name(), "functions.go")) {
+ slog.Info(" creating missing functions.go", "service", svc.Name(), "resource", resourceName)
+ if !ValidateSnakeCase(resourceName) {
+ return errors.New("resource name is invalid")
+ }
+ fncTplName := "functions_scaffold.gotmpl"
+ err = writeTemplateToFile(
+ fncTplName,
+ path.Join(rootFolder, "cmd", "cmd", "build", "templates", fncTplName),
+ path.Join(folder, svc.Name(), res.Name(), "functions.go"),
+ &templateData{
+ PackageName: svc.Name(),
+ PackageNameCamel: ToCamelCase(svc.Name()),
+ PackageNamePascal: ToPascalCase(svc.Name()),
+ NameCamel: ToCamelCase(resourceName),
+ NamePascal: ToPascalCase(resourceName),
+ NameSnake: resourceName,
+ },
+ )
+ if err != nil {
+ return err
+ }
+
+ }
}
}
}
diff --git a/cmd/cmd/build/templates/data_source_scaffold.gotmpl b/cmd/cmd/build/templates/data_source_scaffold.gotmpl
index d13021c7..74fc0f91 100644
--- a/cmd/cmd/build/templates/data_source_scaffold.gotmpl
+++ b/cmd/cmd/build/templates/data_source_scaffold.gotmpl
@@ -2,24 +2,32 @@ package {{.PackageName}}
import (
"context"
+ "fmt"
+ "net/http"
"github.com/hashicorp/terraform-plugin-framework/datasource"
- "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/types"
+ "github.com/hashicorp/terraform-plugin-log/tflog"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg/{{.PackageName}}"
+ {{.PackageName}}Pkg "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/{{.PackageName}}"
+
+ {{.PackageName}}Utils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/{{.PackageName}}/utils"
{{.PackageName}}Gen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/{{.PackageName}}/{{.NameSnake}}/datasources_gen"
)
var _ datasource.DataSource = (*{{.NameCamel}}DataSource)(nil)
+const errorPrefix = "[{{.PackageNamePascal}} - {{.NamePascal}}]"
+
func New{{.NamePascal}}DataSource() datasource.DataSource {
return &{{.NameCamel}}DataSource{}
}
type {{.NameCamel}}DataSource struct{
- client *{{.PackageName}}.APIClient
+ client *{{.PackageName}}Pkg.APIClient
providerData core.ProviderData
}
@@ -31,8 +39,28 @@ func (d *{{.NameCamel}}DataSource) Schema(ctx context.Context, _ datasource.Sche
resp.Schema = {{.PackageName}}Gen.{{.NamePascal}}DataSourceSchema(ctx)
}
+// Configure adds the provider configured client to the data source.
+func (d *{{.NameCamel}}DataSource) Configure(
+ ctx context.Context,
+ req datasource.ConfigureRequest,
+ resp *datasource.ConfigureResponse,
+) {
+ var ok bool
+ d.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics)
+ if !ok {
+ return
+ }
+
+ apiClient := {{.PackageName}}Utils.ConfigureClient(ctx, &d.providerData, &resp.Diagnostics)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+ d.client = apiClient
+ tflog.Info(ctx, fmt.Sprintf("%s client configured", errorPrefix))
+}
+
func (d *{{.NameCamel}}DataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
- var data {{.PackageName}}Gen.{{.NameCamel}}Model
+ var data {{.PackageName}}Gen.{{.NamePascal}}Model
// Read Terraform configuration data into the model
resp.Diagnostics.Append(req.Config.Get(ctx, &data)...)
@@ -41,11 +69,51 @@ func (d *{{.NameCamel}}DataSource) Read(ctx context.Context, req datasource.Read
return
}
+ ctx = core.InitProviderContext(ctx)
+
+ projectId := data.ProjectId.ValueString()
+ region := d.providerData.GetRegionWithOverride(data.Region)
+ {{.NameCamel}}Id := data.{{.NamePascal}}Id.ValueString()
+
+ ctx = tflog.SetField(ctx, "project_id", projectId)
+ ctx = tflog.SetField(ctx, "region", region)
+ ctx = tflog.SetField(ctx, "{{.NameCamel}}_id", {{.NameCamel}}Id)
+
+ {{.NameCamel}}Resp, err := d.client.Get{{.NamePascal}}Request(ctx, projectId, region, {{.NameCamel}}Id).Execute()
+ if err != nil {
+ utils.LogError(
+ ctx,
+ &resp.Diagnostics,
+ err,
+ "Reading {{.NameCamel}}",
+ fmt.Sprintf("{{.NameCamel}} with ID %q does not exist in project %q.", {{.NameCamel}}Id, projectId),
+ map[int]string{
+ http.StatusForbidden: fmt.Sprintf("Project with ID %q not found or forbidden access", projectId),
+ },
+ )
+ resp.State.RemoveResource(ctx)
+ return
+ }
+
+ ctx = core.LogResponse(ctx)
+
+
// Todo: Read API call logic
// Example data value setting
// data.Id = types.StringValue("example-id")
+ err = mapResponseToModel(ctx, {{.NameCamel}}Resp, &data, resp.Diagnostics)
+ if err != nil {
+ core.LogAndAddError(
+ ctx,
+ &resp.Diagnostics,
+ fmt.Sprintf("%s Read", errorPrefix),
+ fmt.Sprintf("Processing API payload: %v", err),
+ )
+ return
+ }
+
// Save data into Terraform state
resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
}
diff --git a/cmd/cmd/build/templates/functions_scaffold.gotmpl b/cmd/cmd/build/templates/functions_scaffold.gotmpl
new file mode 100644
index 00000000..de4d2dbe
--- /dev/null
+++ b/cmd/cmd/build/templates/functions_scaffold.gotmpl
@@ -0,0 +1,98 @@
+package {{.PackageName}}
+
+import (
+ "context"
+ "fmt"
+ "math"
+
+ "github.com/hashicorp/terraform-plugin-framework/attr"
+ "github.com/hashicorp/terraform-plugin-framework/diag"
+ "github.com/hashicorp/terraform-plugin-framework/resource"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
+
+ {{.PackageName}} "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/{{.PackageName}}"
+ {{.PackageName}}ResGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/{{.PackageName}}/instance/resources_gen"
+)
+
+func mapResponseToModel(
+ ctx context.Context,
+ resp *{{.PackageName}}.Get{{.NamePascal}}Response,
+ m *{{.PackageName}}ResGen.{{.NamePascal}}Model,
+ tfDiags diag.Diagnostics,
+) error {
+ // TODO: complete and refactor
+ m.Id = types.StringValue(resp.GetId())
+
+ /*
+ sampleList, diags := types.ListValueFrom(ctx, types.StringType, resp.GetList())
+ tfDiags.Append(diags...)
+ if diags.HasError() {
+ return fmt.Errorf(
+ "error converting list response value",
+ )
+ }
+ sample, diags := {{.PackageName}}ResGen.NewSampleValue(
+ {{.PackageName}}ResGen.SampleValue{}.AttributeTypes(ctx),
+ map[string]attr.Value{
+ "field": types.StringValue(string(resp.GetField())),
+ },
+ )
+ tfDiags.Append(diags...)
+ if diags.HasError() {
+ return fmt.Errorf(
+ "error converting sample response value",
+ "sample",
+ types.StringValue(string(resp.GetField())),
+ )
+ }
+ m.Sample = sample
+ */
+ return nil
+}
+
+func handleEncryption(
+ m *{{.PackageName}}ResGen.{{.NamePascal}}Model,
+ resp *{{.PackageName}}.Get{{.NamePascal}}Response,
+) {{.PackageName}}ResGen.EncryptionValue {
+ if !resp.HasEncryption() ||
+ resp.Encryption == nil ||
+ resp.Encryption.KekKeyId == nil ||
+ resp.Encryption.KekKeyRingId == nil ||
+ resp.Encryption.KekKeyVersion == nil ||
+ resp.Encryption.ServiceAccount == nil {
+
+ if m.Encryption.IsNull() || m.Encryption.IsUnknown() {
+ return {{.PackageName}}ResGen.NewEncryptionValueNull()
+ }
+ return m.Encryption
+ }
+
+ enc := {{.PackageName}}ResGen.NewEncryptionValueNull()
+ if kVal, ok := resp.Encryption.GetKekKeyIdOk(); ok {
+ enc.KekKeyId = types.StringValue(kVal)
+ }
+ if kkVal, ok := resp.Encryption.GetKekKeyRingIdOk(); ok {
+ enc.KekKeyRingId = types.StringValue(kkVal)
+ }
+ if kkvVal, ok := resp.Encryption.GetKekKeyVersionOk(); ok {
+ enc.KekKeyVersion = types.StringValue(kkvVal)
+ }
+ if sa, ok := resp.Encryption.GetServiceAccountOk(); ok {
+ enc.ServiceAccount = types.StringValue(sa)
+ }
+ return enc
+}
+
+func toCreatePayload(
+ ctx context.Context,
+ model *{{.PackageName}}ResGen.{{.NamePascal}}Model,
+) (*{{.PackageName}}.Create{{.NamePascal}}RequestPayload, error) {
+ if model == nil {
+ return nil, fmt.Errorf("nil model")
+ }
+
+ return &{{.PackageName}}.Create{{.NamePascal}}RequestPayload{
+ // TODO: fill fields
+ }, nil
+}
diff --git a/cmd/cmd/build/templates/resource_scaffold.gotmpl b/cmd/cmd/build/templates/resource_scaffold.gotmpl
index 5c96fdae..e497c8ad 100644
--- a/cmd/cmd/build/templates/resource_scaffold.gotmpl
+++ b/cmd/cmd/build/templates/resource_scaffold.gotmpl
@@ -2,15 +2,24 @@ package {{.PackageName}}
import (
"context"
+ _ "embed"
+ "fmt"
+ "strings"
- "github.com/hashicorp/terraform-plugin-framework/resource/identityschema"
+ "github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
+ "github.com/hashicorp/terraform-plugin-framework/resource/identityschema"
"github.com/hashicorp/terraform-plugin-framework/types"
+ "github.com/hashicorp/terraform-plugin-log/tflog"
+ "github.com/stackitcloud/stackit-sdk-go/core/config"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/{{.PackageName}}"
- {{.PackageName}}Gen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/{{.PackageName}}/{{.NameSnake}}/resources_gen"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
+
+ {{.PackageName}}ResGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/{{.PackageName}}/{{.NameSnake}}/resources_gen"
)
var (
@@ -30,10 +39,10 @@ type {{.NameCamel}}Resource struct{
providerData core.ProviderData
}
-type InstanceResourceIdentityModel struct {
+type {{.NamePascal}}ResourceIdentityModel struct {
ProjectID types.String `tfsdk:"project_id"`
Region types.String `tfsdk:"region"`
- InstanceID types.String `tfsdk:"instance_id"`
+ {{.NamePascal}}ID types.String `tfsdk:"instance_id"`
// TODO: implement further needed parts
}
@@ -42,7 +51,20 @@ func (r *{{.NameCamel}}Resource) Metadata(ctx context.Context, req resource.Meta
}
func (r *{{.NameCamel}}Resource) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) {
- resp.Schema = {{.PackageName}}Gen.{{.NamePascal}}ResourceSchema(ctx)
+ schema = {{.PackageName}}ResGen.{{.NamePascal}}ResourceSchema(ctx)
+
+ fields, err := {{.PackageName}}Utils.ReadModifiersConfig(modifiersFileByte)
+ if err != nil {
+ resp.Diagnostics.AddError("error during read modifiers config file", err.Error())
+ return
+ }
+
+ err = {{.PackageName}}Utils.AddPlanModifiersToResourceSchema(fields, &schema)
+ if err != nil {
+ resp.Diagnostics.AddError("error adding plan modifiers", err.Error())
+ return
+ }
+ resp.Schema = schema
}
func (r *instanceResource) IdentitySchema(_ context.Context, _ resource.IdentitySchemaRequest, resp *resource.IdentitySchemaResponse) {
@@ -78,112 +100,26 @@ func (r *{{.NameCamel}}Resource) Configure(
config.WithCustomAuth(r.providerData.RoundTripper),
utils.UserAgentConfigOption(r.providerData.Version),
}
- if r.providerData.PostgresFlexCustomEndpoint != "" {
- apiClientConfigOptions = append(apiClientConfigOptions, config.WithEndpoint(r.providerData.PostgresFlexCustomEndpoint))
+ if r.providerData.{{.PackageNamePascal}}CustomEndpoint != "" {
+ apiClientConfigOptions = append(apiClientConfigOptions, config.WithEndpoint(r.providerData.{{.PackageName}}CustomEndpoint))
} else {
apiClientConfigOptions = append(apiClientConfigOptions, config.WithRegion(r.providerData.GetRegion()))
}
apiClient, err := {{.PackageName}}.NewAPIClient(apiClientConfigOptions...)
if err != nil {
- resp.Diagnostics.AddError( "Error configuring API client", fmt.Sprintf("Configuring client: %v. This is an error related to the provider configuration, not to the resource configuration", err))
+ resp.Diagnostics.AddError(
+ "Error configuring API client",
+ fmt.Sprintf(
+ "Configuring client: %v. This is an error related to the provider configuration, not to the resource configuration",
+ err,
+ ),
+ )
return
}
r.client = apiClient
tflog.Info(ctx, "{{.PackageName}}.{{.NamePascal}} client configured")
}
-func (r *{{.NameCamel}}Resource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) {
- var data {{.PackageName}}Gen.{{.NamePascal}}Model
-
- // Read Terraform plan data into the model
- resp.Diagnostics.Append(req.Plan.Get(ctx, &data)...)
-
- if resp.Diagnostics.HasError() {
- return
- }
-
- // TODO: Create API call logic
-
- // Example data value setting
- data.{{.NameCamel | ucfirst}}Id = types.StringValue("id-from-response")
-
- // TODO: Set data returned by API in identity
- identity := InstanceResourceIdentityModel{
- ProjectID: types.StringValue(projectId),
- Region: types.StringValue(region),
- InstanceID: types.StringValue(instanceId),
- }
- resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- // TODO: implement wait handler if needed
-
- // Save data into Terraform state
- resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
-
- tflog.Info(ctx, "{{.PackageName}}.{{.NamePascal}} created")
-}
-
-func (r *{{.NameCamel}}Resource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) {
- var data {{.PackageName}}Gen.{{.NamePascal}}Model
-
- // Read Terraform prior state data into the model
- resp.Diagnostics.Append(req.State.Get(ctx, &data)...)
-
- // Read identity data
- var identityData InstanceResourceIdentityModel
- resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- if resp.Diagnostics.HasError() {
- return
- }
-
- // Todo: Read API call logic
-
- // Save updated data into Terraform state
- resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
-
- tflog.Info(ctx, "{{.PackageName}}.{{.NamePascal}} read")
-}
-
-func (r *{{.NameCamel}}Resource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) {
- var data {{.PackageName}}Gen.{{.NamePascal}}Model
-
- // Read Terraform plan data into the model
- resp.Diagnostics.Append(req.Plan.Get(ctx, &data)...)
-
- if resp.Diagnostics.HasError() {
- return
- }
-
- // Todo: Update API call logic
-
- // Save updated data into Terraform state
- resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
-
- tflog.Info(ctx, "{{.PackageName}}.{{.NamePascal}} updated")
-}
-
-func (r *{{.NameCamel}}Resource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) {
- var data {{.PackageName}}Gen.{{.NamePascal}}Model
-
- // Read Terraform prior state data into the model
- resp.Diagnostics.Append(req.State.Get(ctx, &data)...)
-
- if resp.Diagnostics.HasError() {
- return
- }
-
- // Todo: Delete API call logic
-
- tflog.Info(ctx, "{{.PackageName}}.{{.NamePascal}} deleted")
-}
-
// ModifyPlan implements resource.ResourceWithModifyPlan.
// Use the modifier to set the effective region in the current plan.
func (r *{{.NameCamel}}Resource) ModifyPlan(
@@ -191,17 +127,21 @@ func (r *{{.NameCamel}}Resource) ModifyPlan(
req resource.ModifyPlanRequest,
resp *resource.ModifyPlanResponse,
) { // nolint:gocritic // function signature required by Terraform
- var configModel {{.PackageName}}Gen.{{.NamePascal}}Model
+
// skip initial empty configuration to avoid follow-up errors
if req.Config.Raw.IsNull() {
return
}
+ var configModel {{.PackageName}}ResGen.{{.NamePascal}}Model
resp.Diagnostics.Append(req.Config.Get(ctx, &configModel)...)
if resp.Diagnostics.HasError() {
return
}
- var planModel {{.PackageName}}Gen.{{.NamePascal}}Model
+ if req.Plan.Raw.IsNull() {
+ return
+ }
+ var planModel {{.PackageName}}ResGen.{{.NamePascal}}Model
resp.Diagnostics.Append(req.Plan.Get(ctx, &planModel)...)
if resp.Diagnostics.HasError() {
return
@@ -212,12 +152,254 @@ func (r *{{.NameCamel}}Resource) ModifyPlan(
return
}
+ var identityModel {{.NamePascal}}ResourceIdentityModel
+ identityModel.ProjectID = planModel.ProjectId
+ identityModel.Region = planModel.Region
+ if !planModel.{{.NamePascal}}Id.IsNull() && !planModel.{{.NamePascal}}Id.IsUnknown() {
+ identityModel.{{.NamePascal}}ID = planModel.{{.NamePascal}}Id
+ }
+
+ resp.Diagnostics.Append(resp.Identity.Set(ctx, identityModel)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
resp.Diagnostics.Append(resp.Plan.Set(ctx, planModel)...)
if resp.Diagnostics.HasError() {
return
}
}
+//go:embed planModifiers.yaml
+var modifiersFileByte []byte
+
+// Create creates a new resource
+func (r *{{.NameCamel}}Resource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) {
+ var data {{.PackageName}}ResGen.{{.NamePascal}}Model
+
+ // Read Terraform plan data into the model
+ resp.Diagnostics.Append(req.Plan.Get(ctx, &data)...)
+
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ // Read identity data
+ var identityData {{.NamePascal}}ResourceIdentityModel
+ resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ ctx = core.InitProviderContext(ctx)
+
+ projectId := identityData.ProjectID.ValueString()
+ region := identityData.Region.ValueString()
+ ctx = tflog.SetField(ctx, "project_id", projectId)
+ ctx = tflog.SetField(ctx, "region", region)
+
+ // TODO: Create API call logic
+ /*
+ // Generate API request body from model
+ payload, err := toCreatePayload(ctx, &model)
+ if err != nil {
+ core.LogAndAddError(
+ ctx,
+ &resp.Diagnostics,
+ "Error creating {{.NamePascal}}",
+ fmt.Sprintf("Creating API payload: %v", err),
+ )
+ return
+ }
+ // Create new {{.NamePascal}}
+ createResp, err := r.client.Create{{.NamePascal}}Request(
+ ctx,
+ projectId,
+ region,
+ ).Create{{.NamePascal}}RequestPayload(*payload).Execute()
+ if err != nil {
+ core.LogAndAddError(ctx, &resp.Diagnostics, "Error creating {{.NamePascal}}", fmt.Sprintf("Calling API: %v", err))
+ return
+ }
+
+ ctx = core.LogResponse(ctx)
+
+ {{.NamePascal}}Id := *createResp.Id
+ */
+
+ // Example data value setting
+ data.{{.NameCamel | ucfirst}}Id = types.StringValue("id-from-response")
+
+ // TODO: Set data returned by API in identity
+ identity := {{.NamePascal}}ResourceIdentityModel{
+ ProjectID: types.StringValue(projectId),
+ Region: types.StringValue(region),
+ // TODO: add missing values
+ {{.NamePascal}}ID: types.StringValue({{.NamePascal}}Id),
+ }
+ resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ // TODO: implement wait handler if needed
+ /*
+
+ waitResp, err := wait.Create{{.NamePascal}}WaitHandler(
+ ctx,
+ r.client,
+ projectId,
+ {{.NamePascal}}Id,
+ region,
+ ).SetSleepBeforeWait(
+ 30 * time.Second,
+ ).SetTimeout(
+ 90 * time.Minute,
+ ).WaitWithContext(ctx)
+ if err != nil {
+ core.LogAndAddError(
+ ctx,
+ &resp.Diagnostics,
+ "Error creating {{.NamePascal}}",
+ fmt.Sprintf("{{.NamePascal}} creation waiting: %v", err),
+ )
+ return
+ }
+
+ if waitResp.Id == nil {
+ core.LogAndAddError(
+ ctx,
+ &resp.Diagnostics,
+ "Error creating {{.NamePascal}}",
+ "{{.NamePascal}} creation waiting: returned id is nil",
+ )
+ return
+ }
+
+ // Map response body to schema
+ err = mapResponseToModel(ctx, waitResp, &model, resp.Diagnostics)
+ if err != nil {
+ core.LogAndAddError(
+ ctx,
+ &resp.Diagnostics,
+ "Error creating {{.NamePascal}}",
+ fmt.Sprintf("Processing API payload: %v", err),
+ )
+ return
+ }
+
+ */
+
+ // Save data into Terraform state
+ resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
+
+ tflog.Info(ctx, "{{.PackageName}}.{{.NamePascal}} created")
+}
+
+func (r *{{.NameCamel}}Resource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) {
+ var data {{.PackageName}}ResGen.{{.NamePascal}}Model
+
+ // Read Terraform prior state data into the model
+ resp.Diagnostics.Append(req.State.Get(ctx, &data)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ // Read identity data
+ var identityData {{.NamePascal}}ResourceIdentityModel
+ resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+
+ ctx = core.InitProviderContext(ctx)
+
+ projectId := identityData.ProjectID.ValueString()
+ region := identityData.Region.ValueString()
+ ctx = tflog.SetField(ctx, "project_id", projectId)
+ ctx = tflog.SetField(ctx, "region", region)
+
+ // Todo: Read API call logic
+
+ // Save updated data into Terraform state
+ resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
+
+ // TODO: Set data returned by API in identity
+ identity := {{.NamePascal}}ResourceIdentityModel{
+ ProjectID: types.StringValue(projectId),
+ Region: types.StringValue(region),
+ // InstanceID: types.StringValue(instanceId),
+ }
+ resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ tflog.Info(ctx, "{{.PackageName}}.{{.NamePascal}} read")
+}
+
+func (r *{{.NameCamel}}Resource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) {
+ var data {{.PackageName}}ResGen.{{.NamePascal}}Model
+
+ // Read Terraform prior state data into the model
+ resp.Diagnostics.Append(req.State.Get(ctx, &data)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ // Read identity data
+ var identityData {{.NamePascal}}ResourceIdentityModel
+ resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+
+ ctx = core.InitProviderContext(ctx)
+
+ projectId := identityData.ProjectID.ValueString()
+ region := identityData.Region.ValueString()
+ ctx = tflog.SetField(ctx, "project_id", projectId)
+ ctx = tflog.SetField(ctx, "region", region)
+
+ // Todo: Update API call logic
+
+ // Save updated data into Terraform state
+ resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
+
+ tflog.Info(ctx, "{{.PackageName}}.{{.NamePascal}} updated")
+}
+
+func (r *{{.NameCamel}}Resource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) {
+ var data {{.PackageName}}ResGen.{{.NamePascal}}Model
+
+ // Read Terraform prior state data into the model
+ resp.Diagnostics.Append(req.State.Get(ctx, &data)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ // Read identity data
+ var identityData {{.NamePascal}}ResourceIdentityModel
+ resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+
+ ctx = core.InitProviderContext(ctx)
+
+ projectId := identityData.ProjectID.ValueString()
+ region := identityData.Region.ValueString()
+ ctx = tflog.SetField(ctx, "project_id", projectId)
+ ctx = tflog.SetField(ctx, "region", region)
+
+ // Todo: Delete API call logic
+
+ tflog.Info(ctx, "{{.PackageName}}.{{.NamePascal}} deleted")
+}
+
// ImportState imports a resource into the Terraform state on success.
// The expected format of the resource import identifier is: project_id,zone_id,record_set_id
func (r *{{.NameCamel}}Resource) ImportState(
diff --git a/cmd/cmd/build/templates/util.gotmpl b/cmd/cmd/build/templates/util.gotmpl
new file mode 100644
index 00000000..cecc8e9e
--- /dev/null
+++ b/cmd/cmd/build/templates/util.gotmpl
@@ -0,0 +1,47 @@
+package utils
+
+import (
+ "context"
+ "fmt"
+
+ {{.PackageName}} "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/{{.PackageName}}"
+
+ "github.com/hashicorp/terraform-plugin-framework/diag"
+ "github.com/stackitcloud/stackit-sdk-go/core/config"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
+)
+
+func ConfigureClient(
+ ctx context.Context,
+ providerData *core.ProviderData,
+ diags *diag.Diagnostics,
+) *{{.PackageName}}.APIClient {
+ apiClientConfigOptions := []config.ConfigurationOption{
+ config.WithCustomAuth(providerData.RoundTripper),
+ utils.UserAgentConfigOption(providerData.Version),
+ }
+ if providerData.{{.PackageName}}CustomEndpoint != "" {
+ apiClientConfigOptions = append(
+ apiClientConfigOptions,
+ config.WithEndpoint(providerData.{{.PackageName}}CustomEndpoint),
+ )
+ } else {
+ apiClientConfigOptions = append(apiClientConfigOptions, config.WithRegion(providerData.GetRegion()))
+ }
+ apiClient, err := {{.PackageName}}.NewAPIClient(apiClientConfigOptions...)
+ if err != nil {
+ core.LogAndAddError(
+ ctx,
+ diags,
+ "Error configuring API client",
+ fmt.Sprintf(
+ "Configuring client: %v. This is an error related to the provider configuration, not to the resource configuration",
+ err,
+ ),
+ )
+ return nil
+ }
+
+ return apiClient
+}
diff --git a/cmd/cmd/build/templates/util_test.gotmpl b/cmd/cmd/build/templates/util_test.gotmpl
new file mode 100644
index 00000000..567f2623
--- /dev/null
+++ b/cmd/cmd/build/templates/util_test.gotmpl
@@ -0,0 +1,97 @@
+package utils
+
+import (
+ "context"
+ "os"
+ "reflect"
+ "testing"
+
+ "github.com/hashicorp/terraform-plugin-framework/diag"
+ sdkClients "github.com/stackitcloud/stackit-sdk-go/core/clients"
+ "github.com/stackitcloud/stackit-sdk-go/core/config"
+ {{.PackageName}} "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/{{.PackageName}}"
+
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
+)
+
+const (
+ testVersion = "1.2.3"
+ testCustomEndpoint = "https://sqlserverflex-custom-endpoint.api.stackit.cloud"
+)
+
+func TestConfigureClient(t *testing.T) {
+ /* mock authentication by setting service account token env variable */
+ os.Clearenv()
+ err := os.Setenv(sdkClients.ServiceAccountToken, "mock-val")
+ if err != nil {
+ t.Errorf("error setting env variable: %v", err)
+ }
+
+ type args struct {
+ providerData *core.ProviderData
+ }
+ tests := []struct {
+ name string
+ args args
+ wantErr bool
+ expected *sqlserverflex.APIClient
+ }{
+ {
+ name: "default endpoint",
+ args: args{
+ providerData: &core.ProviderData{
+ Version: testVersion,
+ },
+ },
+ expected: func() *sqlserverflex.APIClient {
+ apiClient, err := sqlserverflex.NewAPIClient(
+ config.WithRegion("eu01"),
+ utils.UserAgentConfigOption(testVersion),
+ )
+ if err != nil {
+ t.Errorf("error configuring client: %v", err)
+ }
+ return apiClient
+ }(),
+ wantErr: false,
+ },
+ {
+ name: "custom endpoint",
+ args: args{
+ providerData: &core.ProviderData{
+ Version: testVersion,
+ SQLServerFlexCustomEndpoint: testCustomEndpoint,
+ },
+ },
+ expected: func() *sqlserverflex.APIClient {
+ apiClient, err := sqlserverflex.NewAPIClient(
+ utils.UserAgentConfigOption(testVersion),
+ config.WithEndpoint(testCustomEndpoint),
+ )
+ if err != nil {
+ t.Errorf("error configuring client: %v", err)
+ }
+ return apiClient
+ }(),
+ wantErr: false,
+ },
+ }
+ for _, tt := range tests {
+ t.Run(
+ tt.name, func(t *testing.T) {
+ ctx := context.Background()
+ diags := diag.Diagnostics{}
+
+ actual := ConfigureClient(ctx, tt.args.providerData, &diags)
+ if diags.HasError() != tt.wantErr {
+ t.Errorf("ConfigureClient() error = %v, want %v", diags.HasError(), tt.wantErr)
+ }
+
+ if !reflect.DeepEqual(actual, tt.expected) {
+ t.Errorf("ConfigureClient() = %v, want %v", actual, tt.expected)
+ }
+ },
+ )
+ }
+}
diff --git a/cmd/cmd/publish/architecture.go b/cmd/cmd/publish/architecture.go
index a2e6f6af..f77188c3 100644
--- a/cmd/cmd/publish/architecture.go
+++ b/cmd/cmd/publish/architecture.go
@@ -94,7 +94,7 @@ func (p *Provider) CreateArchitectureFiles() error {
archFileName := path.Join(downloadPathPrefix, target, arch)
a := Architecture{
- Protocols: []string{"5.1"},
+ Protocols: []string{"5.1", "6.0"},
OS: target,
Arch: arch,
FileName: sum.Path,
diff --git a/cmd/cmd/publish/templates/index.html.gompl b/cmd/cmd/publish/templates/index.html.gompl
new file mode 100644
index 00000000..531032fe
--- /dev/null
+++ b/cmd/cmd/publish/templates/index.html.gompl
@@ -0,0 +1,11 @@
+
+
+
+ Forwarding | Weiterleitung
+
+
+
+Falls Sie nicht automatisch weitergeleitet werden, klicken Sie bitte hier.
+Sie gelangen dann auf unsere Hauptseite
+
+
diff --git a/cmd/cmd/publish/templates/index.md.gompl b/cmd/cmd/publish/templates/index.md.gompl
new file mode 100644
index 00000000..3ebaa0e1
--- /dev/null
+++ b/cmd/cmd/publish/templates/index.md.gompl
@@ -0,0 +1,34 @@
+---
+page_title: STACKIT provider PrivatePreview
+description: none
+---
+
+# provider
+[Provider](docs/index.md)
+
+## PostGreSQL alpha
+### data sources
+
+- [Flavor](docs/data-sources/postgresflexalpha_flavor.md)
+- [Database](docs/data-sources/postgresflexalpha_database.md)
+- [Instance](docs/data-sources/postgresflexalpha_instance.md)
+- [Flavors](docs/data-sources/postgresflexalpha_flavors.md)
+- [User](docs/data-sources/postgresflexalpha_user.md)
+
+### resources
+- [Database](docs/resources/postgresflexalpha_database.md)
+- [Instance](docs/resources/postgresflexalpha_instance.md)
+- [User](docs/resources/postgresflexalpha_user.md)
+
+## SQL Server alpha
+### data sources
+- [Database](docs/data-sources/sqlserverflexalpha_database.md)
+- [Version](docs/data-sources/sqlserverflexalpha_version.md)
+- [User](docs/data-sources/sqlserverflexalpha_user.md)
+- [Flavor](docs/data-sources/sqlserverflexalpha_flavor.md)
+- [Instance](docs/data-sources/sqlserverflexalpha_instance.md)
+
+### resources
+- [Database](docs/resources/sqlserverflexalpha_database.md)
+- [User](docs/resources/sqlserverflexalpha_user.md)
+- [Instance](docs/resources/sqlserverflexalpha_instance.md)
diff --git a/cmd/cmd/publish/templates/markdown.html.gompl b/cmd/cmd/publish/templates/markdown.html.gompl
new file mode 100644
index 00000000..d338b241
--- /dev/null
+++ b/cmd/cmd/publish/templates/markdown.html.gompl
@@ -0,0 +1,79 @@
+
+{{ $mdFile := .OriginalReq.URL.Path | trimPrefix "/docs" }}
+{{ $md := (include $mdFile | splitFrontMatter) }}
+
+
+ {{$md.Meta.page_title}}
+
+
+
+
+{{$md.Meta.page_title}}
+
+
+
+
+
+
+
+
+
+
+ {{markdown $md.Body}}
+
+
+
+
+
+
+
+
+
+
diff --git a/docs/data-sources/postgresflexalpha_flavor.md b/docs/data-sources/postgresflexalpha_flavor.md
index 4d28ffc3..24c79829 100644
--- a/docs/data-sources/postgresflexalpha_flavor.md
+++ b/docs/data-sources/postgresflexalpha_flavor.md
@@ -10,7 +10,18 @@ description: |-
+## Example Usage
+```terraform
+data "stackitprivatepreview_postgresflexalpha_flavor" "flavor" {
+ project_id = var.project_id
+ region = var.region
+ cpu = 4
+ ram = 16
+ node_type = "Single"
+ storage_class = "premium-perf2-stackit"
+}
+```
## Schema
diff --git a/docs/data-sources/sqlserverflexalpha_flavor.md b/docs/data-sources/sqlserverflexalpha_flavor.md
index 426a0605..0dfc1fd2 100644
--- a/docs/data-sources/sqlserverflexalpha_flavor.md
+++ b/docs/data-sources/sqlserverflexalpha_flavor.md
@@ -10,7 +10,18 @@ description: |-
+## Example Usage
+```terraform
+data "stackitprivatepreview_sqlserverflexalpha_flavor" "flavor" {
+ project_id = var.project_id
+ region = var.region
+ cpu = 4
+ ram = 16
+ node_type = "Single"
+ storage_class = "premium-perf2-stackit"
+}
+```
## Schema
diff --git a/docs/data-sources/sqlserverflexalpha_version.md b/docs/data-sources/sqlserverflexalpha_version.md
deleted file mode 100644
index c9c61732..00000000
--- a/docs/data-sources/sqlserverflexalpha_version.md
+++ /dev/null
@@ -1,35 +0,0 @@
----
-# generated by https://github.com/hashicorp/terraform-plugin-docs
-page_title: "stackitprivatepreview_sqlserverflexalpha_version Data Source - stackitprivatepreview"
-subcategory: ""
-description: |-
-
----
-
-# stackitprivatepreview_sqlserverflexalpha_version (Data Source)
-
-
-
-
-
-
-## Schema
-
-### Required
-
-- `project_id` (String) The STACKIT project ID.
-- `region` (String) The region which should be addressed
-
-### Read-Only
-
-- `versions` (Attributes List) A list containing available sqlserver versions. (see [below for nested schema](#nestedatt--versions))
-
-
-### Nested Schema for `versions`
-
-Read-Only:
-
-- `beta` (Boolean) Flag if the version is a beta version. If set the version may contain bugs and is not fully tested.
-- `deprecated` (String) Timestamp in RFC3339 format which says when the version will no longer be supported by STACKIT.
-- `recommend` (Boolean) Flag if the version is recommend by the STACKIT Team.
-- `version` (String) The sqlserver version used for the instance.
diff --git a/docs/data-sources/sqlserverflexbeta_database.md b/docs/data-sources/sqlserverflexbeta_database.md
new file mode 100644
index 00000000..98a29d9f
--- /dev/null
+++ b/docs/data-sources/sqlserverflexbeta_database.md
@@ -0,0 +1,42 @@
+---
+# generated by https://github.com/hashicorp/terraform-plugin-docs
+page_title: "stackitprivatepreview_sqlserverflexbeta_database Data Source - stackitprivatepreview"
+subcategory: ""
+description: |-
+
+---
+
+# stackitprivatepreview_sqlserverflexbeta_database (Data Source)
+
+
+
+## Example Usage
+
+```terraform
+data "stackitprivatepreview_sqlserverflexbeta_database" "example" {
+ project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
+ instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
+ database_name = "dbname"
+}
+```
+
+
+## Schema
+
+### Required
+
+- `database_name` (String) The name of the database.
+- `instance_id` (String) The ID of the instance.
+- `project_id` (String) The STACKIT project ID.
+
+### Optional
+
+- `region` (String) The region which should be addressed
+
+### Read-Only
+
+- `collation_name` (String) The collation of the database. This database collation should match the *collation_name* of one of the collations given by the **Get database collation list** endpoint.
+- `compatibility_level` (Number) CompatibilityLevel of the Database.
+- `id` (Number) The id of the database.
+- `name` (String) The name of the database.
+- `owner` (String) The owner of the database.
diff --git a/docs/data-sources/sqlserverflexbeta_flavor.md b/docs/data-sources/sqlserverflexbeta_flavor.md
new file mode 100644
index 00000000..6c1569be
--- /dev/null
+++ b/docs/data-sources/sqlserverflexbeta_flavor.md
@@ -0,0 +1,47 @@
+---
+# generated by https://github.com/hashicorp/terraform-plugin-docs
+page_title: "stackitprivatepreview_sqlserverflexbeta_flavor Data Source - stackitprivatepreview"
+subcategory: ""
+description: |-
+
+---
+
+# stackitprivatepreview_sqlserverflexbeta_flavor (Data Source)
+
+
+
+## Example Usage
+
+```terraform
+data "stackitprivatepreview_sqlserverflexbeta_flavor" "flavor" {
+ project_id = var.project_id
+ region = var.region
+ cpu = 4
+ ram = 16
+ node_type = "Single"
+ storage_class = "premium-perf2-stackit"
+}
+```
+
+
+## Schema
+
+### Read-Only
+
+- `cpu` (Number) The cpu count of the instance.
+- `description` (String) The flavor description.
+- `id` (String) The id of the instance flavor.
+- `max_gb` (Number) maximum storage which can be ordered for the flavor in Gigabyte.
+- `memory` (Number) The memory of the instance in Gibibyte.
+- `min_gb` (Number) minimum storage which is required to order in Gigabyte.
+- `node_type` (String) defines the nodeType it can be either single or HA
+- `storage_classes` (Attributes List) maximum storage which can be ordered for the flavor in Gigabyte. (see [below for nested schema](#nestedatt--storage_classes))
+
+
+### Nested Schema for `storage_classes`
+
+Read-Only:
+
+- `class` (String)
+- `max_io_per_sec` (Number)
+- `max_through_in_mb` (Number)
diff --git a/docs/data-sources/sqlserverflexbeta_instance.md b/docs/data-sources/sqlserverflexbeta_instance.md
new file mode 100644
index 00000000..cc3645ef
--- /dev/null
+++ b/docs/data-sources/sqlserverflexbeta_instance.md
@@ -0,0 +1,77 @@
+---
+# generated by https://github.com/hashicorp/terraform-plugin-docs
+page_title: "stackitprivatepreview_sqlserverflexbeta_instance Data Source - stackitprivatepreview"
+subcategory: ""
+description: |-
+
+---
+
+# stackitprivatepreview_sqlserverflexbeta_instance (Data Source)
+
+
+
+## Example Usage
+
+```terraform
+data "stackitprivatepreview_sqlserverflexbeta_instance" "example" {
+ project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
+ instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
+}
+```
+
+
+## Schema
+
+### Required
+
+- `instance_id` (String) The ID of the instance.
+- `project_id` (String) The STACKIT project ID.
+- `region` (String) The region which should be addressed
+
+### Read-Only
+
+- `backup_schedule` (String) The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule.
+- `edition` (String) Edition of the MSSQL server instance
+- `encryption` (Attributes) this defines which key to use for storage encryption (see [below for nested schema](#nestedatt--encryption))
+- `flavor_id` (String) The id of the instance flavor.
+- `id` (String) The ID of the instance.
+- `is_deletable` (Boolean) Whether the instance can be deleted or not.
+- `name` (String) The name of the instance.
+- `network` (Attributes) The access configuration of the instance (see [below for nested schema](#nestedatt--network))
+- `replicas` (Number) How many replicas the instance should have.
+- `retention_days` (Number) The days for how long the backup files should be stored before cleaned up. 30 to 365
+- `status` (String)
+- `storage` (Attributes) The object containing information about the storage size and class. (see [below for nested schema](#nestedatt--storage))
+- `version` (String) The sqlserver version used for the instance.
+
+
+### Nested Schema for `encryption`
+
+Read-Only:
+
+- `kek_key_id` (String) The key identifier
+- `kek_key_ring_id` (String) The keyring identifier
+- `kek_key_version` (String) The key version
+- `service_account` (String)
+
+
+
+### Nested Schema for `network`
+
+Read-Only:
+
+- `access_scope` (String) The network access scope of the instance
+
+⚠️ **Note:** This feature is in private preview. Supplying this object is only permitted for enabled accounts. If your account does not have access, the request will be rejected.
+- `acl` (List of String) List of IPV4 cidr.
+- `instance_address` (String)
+- `router_address` (String)
+
+
+
+### Nested Schema for `storage`
+
+Read-Only:
+
+- `class` (String) The storage class for the storage.
+- `size` (Number) The storage size in Gigabytes.
diff --git a/docs/index.md b/docs/index.md
index 4f1e52cd..84bc25b3 100644
--- a/docs/index.md
+++ b/docs/index.md
@@ -16,14 +16,13 @@ provider "stackitprivatepreview" {
default_region = "eu01"
}
-# Authentication
-
-# Token flow (scheduled for deprecation and will be removed on December 17, 2025)
provider "stackitprivatepreview" {
- default_region = "eu01"
- service_account_token = var.service_account_token
+ default_region = "eu01"
+ service_account_key_path = "service_account.json"
}
+# Authentication
+
# Key flow
provider "stackitprivatepreview" {
default_region = "eu01"
diff --git a/docs/resources/sqlserverflexbeta_database.md b/docs/resources/sqlserverflexbeta_database.md
new file mode 100644
index 00000000..893433fe
--- /dev/null
+++ b/docs/resources/sqlserverflexbeta_database.md
@@ -0,0 +1,36 @@
+---
+# generated by https://github.com/hashicorp/terraform-plugin-docs
+page_title: "stackitprivatepreview_sqlserverflexbeta_database Resource - stackitprivatepreview"
+subcategory: ""
+description: |-
+
+---
+
+# stackitprivatepreview_sqlserverflexbeta_database (Resource)
+
+
+
+
+
+
+## Schema
+
+### Required
+
+- `name` (String) The name of the database.
+- `owner` (String) The owner of the database.
+
+### Optional
+
+- `collation` (String) The collation of the database. This database collation should match the *collation_name* of one of the collations given by the **Get database collation list** endpoint.
+- `compatibility` (Number) CompatibilityLevel of the Database.
+- `database_name` (String) The name of the database.
+- `instance_id` (String) The ID of the instance.
+- `project_id` (String) The STACKIT project ID.
+- `region` (String) The region which should be addressed
+
+### Read-Only
+
+- `collation_name` (String) The collation of the database. This database collation should match the *collation_name* of one of the collations given by the **Get database collation list** endpoint.
+- `compatibility_level` (Number) CompatibilityLevel of the Database.
+- `id` (Number) The id of the database.
diff --git a/docs/resources/sqlserverflexbeta_instance.md b/docs/resources/sqlserverflexbeta_instance.md
new file mode 100644
index 00000000..20f5a9bc
--- /dev/null
+++ b/docs/resources/sqlserverflexbeta_instance.md
@@ -0,0 +1,158 @@
+---
+# generated by https://github.com/hashicorp/terraform-plugin-docs
+page_title: "stackitprivatepreview_sqlserverflexbeta_instance Resource - stackitprivatepreview"
+subcategory: ""
+description: |-
+
+---
+
+# stackitprivatepreview_sqlserverflexbeta_instance (Resource)
+
+
+
+## Example Usage
+
+```terraform
+# without encryption and SNA
+resource "stackitprivatepreview_sqlserverflexbeta_instance" "instance" {
+ project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
+ name = "example-instance"
+ backup_schedule = "0 3 * * *"
+ retention_days = 31
+ flavor_id = "flavor_id"
+ storage = {
+ class = "premium-perf2-stackit"
+ size = 50
+ }
+ version = 2022
+ network = {
+ acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"]
+ access_scope = "SNA"
+ }
+}
+
+# without encryption and PUBLIC
+resource "stackitprivatepreview_sqlserverflexbeta_instance" "instance" {
+ project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
+ name = "example-instance"
+ backup_schedule = "0 3 * * *"
+ retention_days = 31
+ flavor_id = "flavor_id"
+ storage = {
+ class = "premium-perf2-stackit"
+ size = 50
+ }
+ version = 2022
+ network = {
+ acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"]
+ access_scope = "PUBLIC"
+ }
+}
+
+# with encryption and SNA
+resource "stackitprivatepreview_sqlserverflexbeta_instance" "instance" {
+ project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
+ name = "example-instance"
+ backup_schedule = "0 3 * * *"
+ retention_days = 31
+ flavor_id = "flavor_id"
+ storage = {
+ class = "premium-perf2-stackit"
+ size = 50
+ }
+ version = 2022
+ encryption = {
+ kek_key_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
+ kek_key_ring_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
+ kek_key_version = 1
+ service_account = "service_account@email"
+ }
+ network = {
+ acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"]
+ access_scope = "SNA"
+ }
+}
+
+
+# Only use the import statement, if you want to import an existing sqlserverflex instance
+import {
+ to = stackitprivatepreview_sqlserverflexalpha_instance.import-example
+ id = "${var.project_id},${var.region},${var.sql_instance_id}"
+}
+
+# import with identity
+import {
+ to = stackitprivatepreview_sqlserverflexalpha_instance.import-example
+ identity = {
+ project_id = var.project_id
+ region = var.region
+ instance_id = var.sql_instance_id
+ }
+}
+```
+
+
+## Schema
+
+### Required
+
+- `backup_schedule` (String) The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule.
+- `flavor_id` (String) The id of the instance flavor.
+- `name` (String) The name of the instance.
+- `network` (Attributes) the network configuration of the instance. (see [below for nested schema](#nestedatt--network))
+- `retention_days` (Number) The days for how long the backup files should be stored before cleaned up. 30 to 365
+- `storage` (Attributes) The object containing information about the storage size and class. (see [below for nested schema](#nestedatt--storage))
+- `version` (String) The sqlserver version used for the instance.
+
+### Optional
+
+- `encryption` (Attributes) this defines which key to use for storage encryption (see [below for nested schema](#nestedatt--encryption))
+- `instance_id` (String) The ID of the instance.
+- `project_id` (String) The STACKIT project ID.
+- `region` (String) The region which should be addressed
+
+### Read-Only
+
+- `edition` (String) Edition of the MSSQL server instance
+- `id` (String) The ID of the instance.
+- `is_deletable` (Boolean) Whether the instance can be deleted or not.
+- `replicas` (Number) How many replicas the instance should have.
+- `status` (String)
+
+
+### Nested Schema for `network`
+
+Required:
+
+- `acl` (List of String) List of IPV4 cidr.
+
+Optional:
+
+- `access_scope` (String) The network access scope of the instance
+
+⚠️ **Note:** This feature is in private preview. Supplying this object is only permitted for enabled accounts. If your account does not have access, the request will be rejected.
+
+Read-Only:
+
+- `instance_address` (String)
+- `router_address` (String)
+
+
+
+### Nested Schema for `storage`
+
+Required:
+
+- `class` (String) The storage class for the storage.
+- `size` (Number) The storage size in Gigabytes.
+
+
+
+### Nested Schema for `encryption`
+
+Required:
+
+- `kek_key_id` (String) The key identifier
+- `kek_key_ring_id` (String) The keyring identifier
+- `kek_key_version` (String) The key version
+- `service_account` (String)
diff --git a/examples/data-sources/stackitprivatepreview_postgresflexalpha_flavor/data-source.tf b/examples/data-sources/stackitprivatepreview_postgresflexalpha_flavor/data-source.tf
new file mode 100644
index 00000000..67017935
--- /dev/null
+++ b/examples/data-sources/stackitprivatepreview_postgresflexalpha_flavor/data-source.tf
@@ -0,0 +1,8 @@
+data "stackitprivatepreview_postgresflexalpha_flavor" "flavor" {
+ project_id = var.project_id
+ region = var.region
+ cpu = 4
+ ram = 16
+ node_type = "Single"
+ storage_class = "premium-perf2-stackit"
+}
diff --git a/examples/data-sources/stackitprivatepreview_sqlserverflexalpha_flavor/data-source.tf b/examples/data-sources/stackitprivatepreview_sqlserverflexalpha_flavor/data-source.tf
new file mode 100644
index 00000000..25d94537
--- /dev/null
+++ b/examples/data-sources/stackitprivatepreview_sqlserverflexalpha_flavor/data-source.tf
@@ -0,0 +1,8 @@
+data "stackitprivatepreview_sqlserverflexalpha_flavor" "flavor" {
+ project_id = var.project_id
+ region = var.region
+ cpu = 4
+ ram = 16
+ node_type = "Single"
+ storage_class = "premium-perf2-stackit"
+}
diff --git a/examples/data-sources/stackitprivatepreview_sqlserverflexbeta_database/data-source.tf b/examples/data-sources/stackitprivatepreview_sqlserverflexbeta_database/data-source.tf
new file mode 100644
index 00000000..894fcd33
--- /dev/null
+++ b/examples/data-sources/stackitprivatepreview_sqlserverflexbeta_database/data-source.tf
@@ -0,0 +1,5 @@
+data "stackitprivatepreview_sqlserverflexbeta_database" "example" {
+ project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
+ instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
+ database_name = "dbname"
+}
diff --git a/examples/data-sources/stackitprivatepreview_sqlserverflexbeta_flavor/data-source.tf b/examples/data-sources/stackitprivatepreview_sqlserverflexbeta_flavor/data-source.tf
new file mode 100644
index 00000000..f40b9680
--- /dev/null
+++ b/examples/data-sources/stackitprivatepreview_sqlserverflexbeta_flavor/data-source.tf
@@ -0,0 +1,8 @@
+data "stackitprivatepreview_sqlserverflexbeta_flavor" "flavor" {
+ project_id = var.project_id
+ region = var.region
+ cpu = 4
+ ram = 16
+ node_type = "Single"
+ storage_class = "premium-perf2-stackit"
+}
diff --git a/examples/data-sources/stackitprivatepreview_sqlserverflexbeta_instance/data-source.tf b/examples/data-sources/stackitprivatepreview_sqlserverflexbeta_instance/data-source.tf
new file mode 100644
index 00000000..b8c8fc2b
--- /dev/null
+++ b/examples/data-sources/stackitprivatepreview_sqlserverflexbeta_instance/data-source.tf
@@ -0,0 +1,4 @@
+data "stackitprivatepreview_sqlserverflexbeta_instance" "example" {
+ project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
+ instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
+}
diff --git a/examples/provider/provider.tf b/examples/provider/provider.tf
index 1795874c..4db0aed3 100644
--- a/examples/provider/provider.tf
+++ b/examples/provider/provider.tf
@@ -2,14 +2,13 @@ provider "stackitprivatepreview" {
default_region = "eu01"
}
-# Authentication
-
-# Token flow (scheduled for deprecation and will be removed on December 17, 2025)
provider "stackitprivatepreview" {
- default_region = "eu01"
- service_account_token = var.service_account_token
+ default_region = "eu01"
+ service_account_key_path = "service_account.json"
}
+# Authentication
+
# Key flow
provider "stackitprivatepreview" {
default_region = "eu01"
@@ -23,4 +22,3 @@ provider "stackitprivatepreview" {
service_account_key_path = var.service_account_key_path
private_key_path = var.private_key_path
}
-
diff --git a/examples/resources/stackitprivatepreview_sqlserverflexbeta_instance/resource.tf b/examples/resources/stackitprivatepreview_sqlserverflexbeta_instance/resource.tf
new file mode 100644
index 00000000..06e88f64
--- /dev/null
+++ b/examples/resources/stackitprivatepreview_sqlserverflexbeta_instance/resource.tf
@@ -0,0 +1,76 @@
+# without encryption and SNA
+resource "stackitprivatepreview_sqlserverflexbeta_instance" "instance" {
+ project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
+ name = "example-instance"
+ backup_schedule = "0 3 * * *"
+ retention_days = 31
+ flavor_id = "flavor_id"
+ storage = {
+ class = "premium-perf2-stackit"
+ size = 50
+ }
+ version = 2022
+ network = {
+ acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"]
+ access_scope = "SNA"
+ }
+}
+
+# without encryption and PUBLIC
+resource "stackitprivatepreview_sqlserverflexbeta_instance" "instance" {
+ project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
+ name = "example-instance"
+ backup_schedule = "0 3 * * *"
+ retention_days = 31
+ flavor_id = "flavor_id"
+ storage = {
+ class = "premium-perf2-stackit"
+ size = 50
+ }
+ version = 2022
+ network = {
+ acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"]
+ access_scope = "PUBLIC"
+ }
+}
+
+# with encryption and SNA
+resource "stackitprivatepreview_sqlserverflexbeta_instance" "instance" {
+ project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
+ name = "example-instance"
+ backup_schedule = "0 3 * * *"
+ retention_days = 31
+ flavor_id = "flavor_id"
+ storage = {
+ class = "premium-perf2-stackit"
+ size = 50
+ }
+ version = 2022
+ encryption = {
+ kek_key_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
+ kek_key_ring_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
+ kek_key_version = 1
+ service_account = "service_account@email"
+ }
+ network = {
+ acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"]
+ access_scope = "SNA"
+ }
+}
+
+
+# Only use the import statement, if you want to import an existing sqlserverflex instance
+import {
+ to = stackitprivatepreview_sqlserverflexalpha_instance.import-example
+ id = "${var.project_id},${var.region},${var.sql_instance_id}"
+}
+
+# import with identity
+import {
+ to = stackitprivatepreview_sqlserverflexalpha_instance.import-example
+ identity = {
+ project_id = var.project_id
+ region = var.region
+ instance_id = var.sql_instance_id
+ }
+}
diff --git a/go.mod b/go.mod
index 0815f3a3..10aca46d 100644
--- a/go.mod
+++ b/go.mod
@@ -29,13 +29,22 @@ require (
require (
dario.cat/mergo v1.0.1 // indirect
+ github.com/BurntSushi/toml v1.2.1 // indirect
+ github.com/Kunde21/markdownfmt/v3 v3.1.0 // indirect
+ github.com/Masterminds/goutils v1.1.1 // indirect
+ github.com/Masterminds/semver/v3 v3.2.0 // indirect
+ github.com/Masterminds/sprig/v3 v3.2.3 // indirect
github.com/ProtonMail/go-crypto v1.3.0 // indirect
github.com/agext/levenshtein v1.2.3 // indirect
github.com/apparentlymart/go-textseg/v15 v15.0.0 // indirect
+ github.com/armon/go-radix v1.0.0 // indirect
+ github.com/bgentry/speakeasy v0.1.0 // indirect
+ github.com/bmatcuk/doublestar/v4 v4.9.1 // indirect
github.com/cloudflare/circl v1.6.2 // indirect
github.com/fatih/color v1.18.0 // indirect
github.com/golang-jwt/jwt/v5 v5.3.0 // indirect
github.com/golang/protobuf v1.5.4 // indirect
+ github.com/hashicorp/cli v1.1.7 // indirect
github.com/hashicorp/errwrap v1.1.0 // indirect
github.com/hashicorp/go-checkpoint v0.5.0 // indirect
github.com/hashicorp/go-cleanhttp v0.5.2 // indirect
@@ -50,27 +59,38 @@ require (
github.com/hashicorp/logutils v1.0.0 // indirect
github.com/hashicorp/terraform-exec v0.24.0 // indirect
github.com/hashicorp/terraform-json v0.27.2 // indirect
+ github.com/hashicorp/terraform-plugin-docs v0.24.0 // indirect
github.com/hashicorp/terraform-plugin-sdk/v2 v2.38.1 // indirect
github.com/hashicorp/terraform-registry-address v0.4.0 // indirect
github.com/hashicorp/terraform-svchost v0.2.0 // indirect
github.com/hashicorp/yamux v0.1.2 // indirect
+ github.com/huandu/xstrings v1.3.3 // indirect
+ github.com/imdario/mergo v0.3.15 // indirect
github.com/inconshreveable/mousetrap v1.1.0 // indirect
github.com/kr/text v0.2.0 // indirect
github.com/mattn/go-colorable v0.1.14 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect
+ github.com/mattn/go-runewidth v0.0.9 // indirect
github.com/mitchellh/copystructure v1.2.0 // indirect
github.com/mitchellh/go-testing-interface v1.14.1 // indirect
github.com/mitchellh/go-wordwrap v1.0.1 // indirect
github.com/mitchellh/mapstructure v1.5.0 // indirect
github.com/mitchellh/reflectwalk v1.0.2 // indirect
github.com/oklog/run v1.2.0 // indirect
+ github.com/posener/complete v1.2.3 // indirect
+ github.com/shopspring/decimal v1.3.1 // indirect
+ github.com/spf13/cast v1.5.0 // indirect
github.com/spf13/pflag v1.0.10 // indirect
github.com/stretchr/testify v1.11.1 // indirect
github.com/vmihailenco/msgpack v4.0.4+incompatible // indirect
github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect
github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect
+ github.com/yuin/goldmark v1.7.7 // indirect
+ github.com/yuin/goldmark-meta v1.1.0 // indirect
github.com/zclconf/go-cty v1.17.0 // indirect
+ go.abhg.dev/goldmark/frontmatter v0.2.0 // indirect
golang.org/x/crypto v0.47.0 // indirect
+ golang.org/x/exp v0.0.0-20230626212559-97b1e661b5df // indirect
golang.org/x/mod v0.32.0 // indirect
golang.org/x/net v0.49.0 // indirect
golang.org/x/sync v0.19.0 // indirect
@@ -81,6 +101,7 @@ require (
google.golang.org/genproto/googleapis/rpc v0.0.0-20260120221211-b8f7ae30c516 // indirect
google.golang.org/grpc v1.78.0 // indirect
google.golang.org/protobuf v1.36.11 // indirect
+ gopkg.in/yaml.v2 v2.4.0 // indirect
)
tool golang.org/x/tools/cmd/goimports
diff --git a/go.sum b/go.sum
index a7b3189b..59906446 100644
--- a/go.sum
+++ b/go.sum
@@ -1,5 +1,15 @@
dario.cat/mergo v1.0.1 h1:Ra4+bf83h2ztPIQYNP99R6m+Y7KfnARDfID+a+vLl4s=
dario.cat/mergo v1.0.1/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk=
+github.com/BurntSushi/toml v1.2.1 h1:9F2/+DoOYIOksmaJFPw1tGFy1eDnIJXg+UHjuD8lTak=
+github.com/BurntSushi/toml v1.2.1/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ=
+github.com/Kunde21/markdownfmt/v3 v3.1.0 h1:KiZu9LKs+wFFBQKhrZJrFZwtLnCCWJahL+S+E/3VnM0=
+github.com/Kunde21/markdownfmt/v3 v3.1.0/go.mod h1:tPXN1RTyOzJwhfHoon9wUr4HGYmWgVxSQN6VBJDkrVc=
+github.com/Masterminds/goutils v1.1.1 h1:5nUrii3FMTL5diU80unEVvNevw1nH4+ZV4DSLVJLSYI=
+github.com/Masterminds/goutils v1.1.1/go.mod h1:8cTjp+g8YejhMuvIA5y2vz3BpJxksy863GQaJW2MFNU=
+github.com/Masterminds/semver/v3 v3.2.0 h1:3MEsd0SM6jqZojhjLWWeBY+Kcjy9i6MQAeY7YgDP83g=
+github.com/Masterminds/semver/v3 v3.2.0/go.mod h1:qvl/7zhW3nngYb5+80sSMF+FG2BjYrf8m9wsX0PNOMQ=
+github.com/Masterminds/sprig/v3 v3.2.3 h1:eL2fZNezLomi0uOLqjQoN6BfsDD+fyLtgbJMAj9n6YA=
+github.com/Masterminds/sprig/v3 v3.2.3/go.mod h1:rXcFaZ2zZbLRJv/xSysmlgIM1u11eBaRMhvYXJNkGuM=
github.com/MatusOllah/slogcolor v1.7.0 h1:Nrd7yBPv2EBEEBEwl7WEPRmMd1ozZzw2jm8SLMYDbKs=
github.com/MatusOllah/slogcolor v1.7.0/go.mod h1:5y1H50XuQIBvuYTJlmokWi+4FuPiJN5L7Z0jM4K4bYA=
github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY=
@@ -11,6 +21,12 @@ github.com/agext/levenshtein v1.2.3/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki
github.com/apparentlymart/go-textseg/v12 v12.0.0/go.mod h1:S/4uRK2UtaQttw1GenVJEynmyUenKwP++x/+DdGV/Ec=
github.com/apparentlymart/go-textseg/v15 v15.0.0 h1:uYvfpb3DyLSCGWnctWKGj857c6ew1u1fNQOlOtuGxQY=
github.com/apparentlymart/go-textseg/v15 v15.0.0/go.mod h1:K8XmNZdhEBkdlyDdvbmmsvpAG721bKi0joRfFdHIWJ4=
+github.com/armon/go-radix v1.0.0 h1:F4z6KzEeeQIMeLFa97iZU6vupzoecKdU5TX24SNppXI=
+github.com/armon/go-radix v1.0.0/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8=
+github.com/bgentry/speakeasy v0.1.0 h1:ByYyxL9InA1OWqxJqqp2A5pYHUrCiAL6K3J+LKSsQkY=
+github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs=
+github.com/bmatcuk/doublestar/v4 v4.9.1 h1:X8jg9rRZmJd4yRy7ZeNDRnM+T3ZfHv15JiBJ/avrEXE=
+github.com/bmatcuk/doublestar/v4 v4.9.1/go.mod h1:xBQ8jztBU6kakFMg+8WGxn0c6z1fTSPVIjEY1Wr7jzc=
github.com/bufbuild/protocompile v0.14.1 h1:iA73zAf/fyljNjQKwYzUHD6AD4R8KMasmwa/FBatYVw=
github.com/bufbuild/protocompile v0.14.1/go.mod h1:ppVdAIhbr2H8asPk6k4pY7t9zB1OU5DoEw9xY/FUi1c=
github.com/cloudflare/circl v1.6.2 h1:hL7VBpHHKzrV5WTfHCaBsgx/HGbBYlgrwvNXEVDYYsQ=
@@ -54,8 +70,11 @@ github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMyw
github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
+github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
+github.com/hashicorp/cli v1.1.7 h1:/fZJ+hNdwfTSfsxMBa9WWMlfjUZbX8/LnUxgAd7lCVU=
+github.com/hashicorp/cli v1.1.7/go.mod h1:e6Mfpga9OCT1vqzFuoGZiiF/KaG9CbUfO5s3ghU3YgU=
github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I=
github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
@@ -68,6 +87,7 @@ github.com/hashicorp/go-cty v1.5.0 h1:EkQ/v+dDNUqnuVpmS5fPqyY71NXVgT5gf32+57xY8g
github.com/hashicorp/go-cty v1.5.0/go.mod h1:lFUCG5kd8exDobgSfyj4ONE/dc822kiYMguVKdHGMLM=
github.com/hashicorp/go-hclog v1.6.3 h1:Qr2kF+eVWjTiYmU7Y31tYlP1h0q/X3Nl3tPGdaB11/k=
github.com/hashicorp/go-hclog v1.6.3/go.mod h1:W4Qnvbt70Wk/zYJryRzDRU/4r0kIg0PVHBcfoyhpF5M=
+github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk=
github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo=
github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM=
github.com/hashicorp/go-plugin v1.7.0 h1:YghfQH/0QmPNc/AZMTFE3ac8fipZyZECHdDPshfk+mA=
@@ -89,6 +109,8 @@ github.com/hashicorp/terraform-exec v0.24.0 h1:mL0xlk9H5g2bn0pPF6JQZk5YlByqSqrO5
github.com/hashicorp/terraform-exec v0.24.0/go.mod h1:lluc/rDYfAhYdslLJQg3J0oDqo88oGQAdHR+wDqFvo4=
github.com/hashicorp/terraform-json v0.27.2 h1:BwGuzM6iUPqf9JYM/Z4AF1OJ5VVJEEzoKST/tRDBJKU=
github.com/hashicorp/terraform-json v0.27.2/go.mod h1:GzPLJ1PLdUG5xL6xn1OXWIjteQRT2CNT9o/6A9mi9hE=
+github.com/hashicorp/terraform-plugin-docs v0.24.0 h1:YNZYd+8cpYclQyXbl1EEngbld8w7/LPOm99GD5nikIU=
+github.com/hashicorp/terraform-plugin-docs v0.24.0/go.mod h1:YLg+7LEwVmRuJc0EuCw0SPLxuQXw5mW8iJ5ml/kvi+o=
github.com/hashicorp/terraform-plugin-framework v1.17.0 h1:JdX50CFrYcYFY31gkmitAEAzLKoBgsK+iaJjDC8OexY=
github.com/hashicorp/terraform-plugin-framework v1.17.0/go.mod h1:4OUXKdHNosX+ys6rLgVlgklfxN3WHR5VHSOABeS/BM0=
github.com/hashicorp/terraform-plugin-framework-validators v0.19.0 h1:Zz3iGgzxe/1XBkooZCewS0nJAaCFPFPHdNJd8FgE4Ow=
@@ -107,8 +129,13 @@ github.com/hashicorp/terraform-svchost v0.2.0 h1:wVc2vMiodOHvNZcQw/3y9af1XSomgjG
github.com/hashicorp/terraform-svchost v0.2.0/go.mod h1:/98rrS2yZsbppi4VGVCjwYmh8dqsKzISqK7Hli+0rcQ=
github.com/hashicorp/yamux v0.1.2 h1:XtB8kyFOyHXYVFnwT5C3+Bdo8gArse7j2AQ0DA0Uey8=
github.com/hashicorp/yamux v0.1.2/go.mod h1:C+zze2n6e/7wshOZep2A70/aQU6QBRWJO/G6FT1wIns=
+github.com/huandu/xstrings v1.3.3 h1:/Gcsuc1x8JVbJ9/rlye4xZnVAbEkGauT8lbebqcQws4=
+github.com/huandu/xstrings v1.3.3/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE=
github.com/iancoleman/strcase v0.3.0 h1:nTXanmYxhfFAMjZL34Ov6gkzEsSJZ5DbhxWjvSASxEI=
github.com/iancoleman/strcase v0.3.0/go.mod h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47ZCWhYzw7ho=
+github.com/imdario/mergo v0.3.11/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA=
+github.com/imdario/mergo v0.3.15 h1:M8XP7IuFNsqUx6VPK2P9OSmsYsI/YFaGil0uD21V3dM=
+github.com/imdario/mergo v0.3.15/go.mod h1:WBLT9ZmE3lPoWsEzCh9LPo3TiwVN+ZKEjmz+hD27ysY=
github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8=
github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
@@ -137,6 +164,9 @@ github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Ky
github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94=
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
+github.com/mattn/go-runewidth v0.0.9 h1:Lm995f3rfxdpd6TSmuVCHVb/QhupuXlYr8sCI/QdE+0=
+github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI=
+github.com/mitchellh/copystructure v1.0.0/go.mod h1:SNtv71yrdKgLRyLFxmLdkAbkKEFWgYaq1OVrnRcwhnw=
github.com/mitchellh/copystructure v1.2.0 h1:vpKXTN4ewci03Vljg/q9QvCGUDttBOGBIa15WveJJGw=
github.com/mitchellh/copystructure v1.2.0/go.mod h1:qLl+cE2AmVv+CoeAwDPye/v+N2HKCj9FbZEVFJRxO9s=
github.com/mitchellh/go-testing-interface v1.14.1 h1:jrgshOhYAUVNMAJiKbEu7EqAwgJJ2JqpQmpLJOu07cU=
@@ -145,6 +175,7 @@ github.com/mitchellh/go-wordwrap v1.0.1 h1:TLuKupo69TCn6TQSyGxwI1EblZZEsQ0vMlAFQ
github.com/mitchellh/go-wordwrap v1.0.1/go.mod h1:R62XHJLzvMFRBbcrT7m7WgmE1eOyTSsCt+hzestvNj0=
github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY=
github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
+github.com/mitchellh/reflectwalk v1.0.0/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw=
github.com/mitchellh/reflectwalk v1.0.2 h1:G2LzWKi524PWgd3mLHV8Y5k7s6XUvT0Gef6zxSIeXaQ=
github.com/mitchellh/reflectwalk v1.0.2/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw=
github.com/oklog/run v1.2.0 h1:O8x3yXwah4A73hJdlrwo/2X6J62gE5qTMusH0dvz60E=
@@ -154,13 +185,21 @@ github.com/pjbgf/sha1cd v0.3.2/go.mod h1:zQWigSxVmsHEZow5qaLtPYxpcKMMQpa09ixqBxu
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U=
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
+github.com/posener/complete v1.2.3 h1:NP0eAhjcjImqslEwo/1hq7gpajME0fTLTezBKDqfXqo=
+github.com/posener/complete v1.2.3/go.mod h1:WZIdtGGp+qx0sLrYKtIRAruyNpv6hFCicSgv7Sy7s/s=
github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ=
github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 h1:n661drycOFuPLCN3Uc8sB6B/s6Z4t2xvBgU1htSHuq8=
github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3/go.mod h1:A0bzQcvG0E7Rwjx0REVgAGH58e96+X0MeOfepqsbeW4=
+github.com/shopspring/decimal v1.2.0/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o=
+github.com/shopspring/decimal v1.3.1 h1:2Usl1nmF/WZucqkFZhnfFYxxxu8LG21F6nPQBE5gKV8=
+github.com/shopspring/decimal v1.3.1/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o=
github.com/skeema/knownhosts v1.3.1 h1:X2osQ+RAjK76shCbvhHHHVl3ZlgDm8apHEHFqRjnBY8=
github.com/skeema/knownhosts v1.3.1/go.mod h1:r7KTdC8l4uxWRyK2TpQZ/1o5HaSzh06ePQNxPwTcfiY=
+github.com/spf13/cast v1.3.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE=
+github.com/spf13/cast v1.5.0 h1:rj3WzYc11XZaIZMPKmwP96zkFEnnAmV8s6XbB2aY32w=
+github.com/spf13/cast v1.5.0/go.mod h1:SpXXQ5YoyJw6s3/6cMTQuxvgRl3PCJiyaX9p6b155UU=
github.com/spf13/cobra v1.4.0/go.mod h1:Wo4iy3BUC+X2Fybo0PDqwJIv3dNRiZLHQymsfxlB84g=
github.com/spf13/cobra v1.10.2 h1:DMTTonx5m65Ic0GOoRY2c16WCbHxOOw6xxezuLaBpcU=
github.com/spf13/cobra v1.10.2/go.mod h1:7C1pvHqHw5A4vrJfjNwvOdzYu0Gml16OCs2GRiTUUS4=
@@ -175,6 +214,9 @@ github.com/stackitcloud/stackit-sdk-go/services/iaasalpha v0.1.23-alpha/go.mod h
github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex v1.4.1 h1:6MJdy1xmdE+uOo/F8mR5HSldjPSHpdhwuqS3u9m2EWQ=
github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex v1.4.1/go.mod h1:XLr3ZfrT1g8ZZMm7A6RXOPBuhBkikdUN2o/+/Y+Hu+g=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
+github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
+github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
+github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
github.com/stretchr/testify v1.7.2/go.mod h1:R6va5+xMeoiuVRoj+gSkQ7d3FALtqAAGI1FQKckRals=
github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U=
github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U=
@@ -190,10 +232,16 @@ github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV
github.com/xanzy/ssh-agent v0.3.3 h1:+/15pJfg/RsTxqYcX6fHqOXZwwMP+2VyYWJeWM2qQFM=
github.com/xanzy/ssh-agent v0.3.3/go.mod h1:6dzNDKs0J9rVPHPhaGCukekBHKqfl+L3KghI1Bc68Uw=
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
+github.com/yuin/goldmark v1.7.7 h1:5m9rrB1sW3JUMToKFQfb+FGt1U7r57IHu5GrYrG2nqU=
+github.com/yuin/goldmark v1.7.7/go.mod h1:uzxRWxtg69N339t3louHJ7+O03ezfj6PlliRlaOzY1E=
+github.com/yuin/goldmark-meta v1.1.0 h1:pWw+JLHGZe8Rk0EGsMVssiNb/AaPMHfSRszZeUeiOUc=
+github.com/yuin/goldmark-meta v1.1.0/go.mod h1:U4spWENafuA7Zyg+Lj5RqK/MF+ovMYtBvXi1lBb2VP0=
github.com/zclconf/go-cty v1.17.0 h1:seZvECve6XX4tmnvRzWtJNHdscMtYEx5R7bnnVyd/d0=
github.com/zclconf/go-cty v1.17.0/go.mod h1:wqFzcImaLTI6A5HfsRwB0nj5n0MRZFwmey8YoFPPs3U=
github.com/zclconf/go-cty-debug v0.0.0-20240509010212-0d6042c53940 h1:4r45xpDWB6ZMSMNJFMOjqrGHynW3DIBuR2H9j0ug+Mo=
github.com/zclconf/go-cty-debug v0.0.0-20240509010212-0d6042c53940/go.mod h1:CmBdvvj3nqzfzJ6nTCIwDTPZ56aVGvDrmztiO5g3qrM=
+go.abhg.dev/goldmark/frontmatter v0.2.0 h1:P8kPG0YkL12+aYk2yU3xHv4tcXzeVnN+gU0tJ5JnxRw=
+go.abhg.dev/goldmark/frontmatter v0.2.0/go.mod h1:XqrEkZuM57djk7zrlRUB02x8I5J0px76YjkOzhB4YlU=
go.opentelemetry.io/auto/sdk v1.2.1 h1:jXsnJ4Lmnqd11kwkBV2LgLoFMZKizbCi5fNZ/ipaZ64=
go.opentelemetry.io/auto/sdk v1.2.1/go.mod h1:KRTj+aOaElaLi+wW1kO/DZRXwkF4C5xPbEe3ZiIhN7Y=
go.opentelemetry.io/otel v1.38.0 h1:RkfdswUDRimDg0m2Az18RKOsnI8UDzppJAtj01/Ymk8=
@@ -209,8 +257,11 @@ go.opentelemetry.io/otel/trace v1.38.0/go.mod h1:j1P9ivuFsTceSWe1oY+EeW3sc+Pp42s
go.yaml.in/yaml/v3 v3.0.4/go.mod h1:DhzuOOF2ATzADvBadXxruRBLzYTpT36CKvDb3+aBEFg=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
+golang.org/x/crypto v0.3.0/go.mod h1:hebNnKkNXi2UzZN1eVRvBB7co0a+JxK6XbPiWVs/3J4=
golang.org/x/crypto v0.47.0 h1:V6e3FRj+n4dbpw86FJ8Fv7XVOql7TEwpHapKoMJ/GO8=
golang.org/x/crypto v0.47.0/go.mod h1:ff3Y9VzzKbwSSEzWqJsJVBnWmRwRSHt/6Op5n9bQc4A=
+golang.org/x/exp v0.0.0-20230626212559-97b1e661b5df h1:UA2aFVmmsIlefxMk29Dp2juaUSth8Pyn3Tq5Y5mJGME=
+golang.org/x/exp v0.0.0-20230626212559-97b1e661b5df/go.mod h1:FXUEEKJgO7OQYeo8N01OfiKP8RXMtf6e8aTskBGqWdc=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
golang.org/x/mod v0.32.0 h1:9F4d3PHLljb6x//jOyokMv3eX+YDeepZSEo3mFJy93c=
golang.org/x/mod v0.32.0/go.mod h1:SgipZ/3h2Ci89DlEtEXWUk/HteuRin+HHhN+WbNhguU=
@@ -218,6 +269,7 @@ golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
+golang.org/x/net v0.2.0/go.mod h1:KqCZLdyyvdV855qA2rE3GC2aiw5xGR5TEjj8smXukLY=
golang.org/x/net v0.49.0 h1:eeHFmOGUTtaaPSGNmjBKpbng9MulQsJURQUAfUwY++o=
golang.org/x/net v0.49.0/go.mod h1:/ysNB2EvaqvesRkuLAyjI1ycPZlQHM3q01F02UY/MV8=
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
@@ -235,6 +287,7 @@ golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBc
golang.org/x/sys v0.0.0-20220503163025-988cb79eb6c6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.40.0 h1:DBZZqJ2Rkml6QMQsZywtnjnnGvHza6BTfYFWY9kjEWQ=
golang.org/x/sys v0.40.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
@@ -242,12 +295,14 @@ golang.org/x/telemetry v0.0.0-20260116145544-c6413dc483f5 h1:i0p03B68+xC1kD2QUO8
golang.org/x/telemetry v0.0.0-20260116145544-c6413dc483f5/go.mod h1:b7fPSJ0pKZ3ccUh8gnTONJxhn3c/PS6tyzQvyqw4iA8=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
+golang.org/x/term v0.2.0/go.mod h1:TVmDHMZPmdnySmBfhjOoOdhjzdE1h4u1VwSiw2l1Nuc=
golang.org/x/term v0.39.0 h1:RclSuaJf32jOqZz74CkPA9qFuVTX7vhLlpfj/IGWlqY=
golang.org/x/term v0.39.0/go.mod h1:yxzUCTP/U+FzoxfdKmLaA0RV1WgE0VY7hXBwKtY/4ww=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ=
+golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.33.0 h1:B3njUFyqtHDUI5jMn1YIr5B0IE2U0qck04r6d4KPAxE=
golang.org/x/text v0.33.0/go.mod h1:LuMebE6+rBincTi9+xWTY8TztLzKHc/9C1uBCG27+q8=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
@@ -276,6 +331,9 @@ gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntN
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
gopkg.in/warnings.v0 v0.1.2 h1:wFXVbFY8DY5/xOe1ECiWdKCzZlxgshcYVNkBHstARME=
gopkg.in/warnings.v0 v0.1.2/go.mod h1:jksf8JmL6Qr/oQM2OXTHunEvvTAsrWBLb6OOjuVWRNI=
+gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
+gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
+gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
diff --git a/sample/sqlserver/sqlserver.tf b/sample/sqlserver/sqlserver.tf
index 847678bb..bf6a88d1 100644
--- a/sample/sqlserver/sqlserver.tf
+++ b/sample/sqlserver/sqlserver.tf
@@ -46,6 +46,34 @@ resource "stackitprivatepreview_sqlserverflexalpha_instance" "msh-sna-001" {
}
}
+resource "stackitprivatepreview_sqlserverflexalpha_instance" "msh-sna-101" {
+ project_id = var.project_id
+ name = "msh-sna-101"
+ backup_schedule = "0 3 * * *"
+ retention_days = 31
+ flavor_id = data.stackitprivatepreview_sqlserverflexalpha_flavor.sqlserver_flavor.flavor_id
+ storage = {
+ class = "premium-perf2-stackit"
+ size = 50
+ }
+ version = 2022
+ encryption = {
+ #key_id = stackit_kms_key.key.key_id
+ #keyring_id = stackit_kms_keyring.keyring.keyring_id
+ #key_version = 1
+ # key with scope public
+ kek_key_id = "fe039bcf-8d7b-431a-801d-9e81371a6b7b"
+ # key_id = var.key_id
+ kek_key_ring_id = var.keyring_id
+ kek_key_version = var.key_version
+ service_account = var.sa_email
+ }
+ network = {
+ acl = ["0.0.0.0/0", "193.148.160.0/19"]
+ access_scope = "SNA"
+ }
+}
+
resource "stackitprivatepreview_sqlserverflexalpha_instance" "msh-nosna-001" {
project_id = var.project_id
name = "msh-nosna-001"
diff --git a/service_specs/sqlserverflex/alpha/collation_config.yml.disabled b/service_specs/sqlserverflex/alpha/collation_config.yml.bak
similarity index 92%
rename from service_specs/sqlserverflex/alpha/collation_config.yml.disabled
rename to service_specs/sqlserverflex/alpha/collation_config.yml.bak
index 9cb13c19..9ebfe5b4 100644
--- a/service_specs/sqlserverflex/alpha/collation_config.yml.disabled
+++ b/service_specs/sqlserverflex/alpha/collation_config.yml.bak
@@ -2,7 +2,7 @@ provider:
name: stackitprivatepreview
data_sources:
- collation:
+ collations:
read:
path: /v3alpha1/projects/{projectId}/regions/{region}/instances/{instanceId}/collations
method: GET
diff --git a/service_specs/sqlserverflex/alpha/database_config.yml b/service_specs/sqlserverflex/alpha/database_config.yml
index e8ea6ef9..cd592e80 100644
--- a/service_specs/sqlserverflex/alpha/database_config.yml
+++ b/service_specs/sqlserverflex/alpha/database_config.yml
@@ -1,13 +1,8 @@
-
provider:
name: stackitprivatepreview
resources:
database:
- schema:
- attributes:
- aliases:
- id: database_id
create:
path: /v3alpha1/projects/{projectId}/regions/{region}/instances/{instanceId}/databases
method: POST
@@ -17,6 +12,10 @@ resources:
delete:
path: /v3alpha1/projects/{projectId}/regions/{region}/instances/{instanceId}/databases/{databaseName}
method: DELETE
+ schema:
+ attributes:
+ aliases:
+ id: databaseId
data_sources:
@@ -26,9 +25,10 @@ data_sources:
method: GET
database:
- attributes:
- aliases:
- id: database_id
read:
path: /v3alpha1/projects/{projectId}/regions/{region}/instances/{instanceId}/databases/{databaseName}
method: GET
+ schema:
+ attributes:
+ aliases:
+ id: database_id
diff --git a/service_specs/sqlserverflex/alpha/version_config.yml b/service_specs/sqlserverflex/alpha/version_config.yml.bak
similarity index 92%
rename from service_specs/sqlserverflex/alpha/version_config.yml
rename to service_specs/sqlserverflex/alpha/version_config.yml.bak
index 3a3f982d..937dccd5 100644
--- a/service_specs/sqlserverflex/alpha/version_config.yml
+++ b/service_specs/sqlserverflex/alpha/version_config.yml.bak
@@ -3,7 +3,7 @@ provider:
name: stackitprivatepreview
data_sources:
- version:
+ versions:
read:
path: /v3alpha1/projects/{projectId}/regions/{region}/versions
method: GET
diff --git a/service_specs/sqlserverflex/beta/backup_config.yml.disabled b/service_specs/sqlserverflex/beta/backup_config.yml.disabled
new file mode 100644
index 00000000..7df5fc4b
--- /dev/null
+++ b/service_specs/sqlserverflex/beta/backup_config.yml.disabled
@@ -0,0 +1,13 @@
+provider:
+ name: stackitprivatepreview
+
+data_sources:
+ backups:
+ read:
+ path: /v3beta1/projects/{projectId}/regions/{region}/instances/{instanceId}/backups
+ method: GET
+
+ backup:
+ read:
+ path: /v3beta1/projects/{projectId}/regions/{region}/instances/{instanceId}/backups/{backupId}
+ method: GET
diff --git a/service_specs/sqlserverflex/beta/collation_config.yml.disabled b/service_specs/sqlserverflex/beta/collation_config.yml.disabled
new file mode 100644
index 00000000..d1160ec3
--- /dev/null
+++ b/service_specs/sqlserverflex/beta/collation_config.yml.disabled
@@ -0,0 +1,8 @@
+provider:
+ name: stackitprivatepreview
+
+data_sources:
+ collation:
+ read:
+ path: /v3beta1/projects/{projectId}/regions/{region}/instances/{instanceId}/collations
+ method: GET
diff --git a/service_specs/sqlserverflex/beta/database_config.yml b/service_specs/sqlserverflex/beta/database_config.yml
new file mode 100644
index 00000000..d886fc20
--- /dev/null
+++ b/service_specs/sqlserverflex/beta/database_config.yml
@@ -0,0 +1,34 @@
+
+provider:
+ name: stackitprivatepreview
+
+resources:
+ database:
+ schema:
+ attributes:
+ aliases:
+ id: databaseId
+ create:
+ path: /v3beta1/projects/{projectId}/regions/{region}/instances/{instanceId}/databases
+ method: POST
+ read:
+ path: /v3beta1/projects/{projectId}/regions/{region}/instances/{instanceId}/databases/{databaseName}
+ method: GET
+ delete:
+ path: /v3beta1/projects/{projectId}/regions/{region}/instances/{instanceId}/databases/{databaseName}
+ method: DELETE
+
+
+data_sources:
+ databases:
+ read:
+ path: /v3beta1/projects/{projectId}/regions/{region}/instances/{instanceId}/databases
+ method: GET
+
+ database:
+ attributes:
+ aliases:
+ id: database_id
+ read:
+ path: /v3beta1/projects/{projectId}/regions/{region}/instances/{instanceId}/databases/{databaseName}
+ method: GET
diff --git a/service_specs/sqlserverflex/beta/flavors_config.yml b/service_specs/sqlserverflex/beta/flavors_config.yml
new file mode 100644
index 00000000..4b985a4c
--- /dev/null
+++ b/service_specs/sqlserverflex/beta/flavors_config.yml
@@ -0,0 +1,9 @@
+
+provider:
+ name: stackitprivatepreview
+
+data_sources:
+ flavors:
+ read:
+ path: /v3beta1/projects/{projectId}/regions/{region}/flavors
+ method: GET
diff --git a/service_specs/sqlserverflex/beta/instance_config.yml b/service_specs/sqlserverflex/beta/instance_config.yml
new file mode 100644
index 00000000..cea25959
--- /dev/null
+++ b/service_specs/sqlserverflex/beta/instance_config.yml
@@ -0,0 +1,28 @@
+provider:
+ name: stackitprivatepreview
+
+resources:
+ instance:
+ create:
+ path: /v3beta1/projects/{projectId}/regions/{region}/instances
+ method: POST
+ read:
+ path: /v3beta1/projects/{projectId}/regions/{region}/instances/{instanceId}
+ method: GET
+ update:
+ path: /v3beta1/projects/{projectId}/regions/{region}/instances/{instanceId}
+ method: PUT
+ delete:
+ path: /v3beta1/projects/{projectId}/regions/{region}/instances/{instanceId}
+ method: DELETE
+
+data_sources:
+ instances:
+ read:
+ path: /v3beta1/projects/{projectId}/regions/{region}/instances
+ method: GET
+
+ instance:
+ read:
+ path: /v3beta1/projects/{projectId}/regions/{region}/instances/{instanceId}
+ method: GET
diff --git a/service_specs/sqlserverflex/beta/user_config.yml b/service_specs/sqlserverflex/beta/user_config.yml
new file mode 100644
index 00000000..bfa9a3a7
--- /dev/null
+++ b/service_specs/sqlserverflex/beta/user_config.yml
@@ -0,0 +1,24 @@
+
+provider:
+ name: stackitprivatepreview
+
+resources:
+ user:
+ create:
+ path: /v3beta1/projects/{projectId}/regions/{region}/instances/{instanceId}/users
+ method: POST
+ read:
+ path: /v3beta1/projects/{projectId}/regions/{region}/instances/{instanceId}/users/{userId}
+ method: GET
+ update:
+ path: /v3beta1/projects/{projectId}/regions/{region}/instances/{instanceId}/users/{userId}
+ method: PUT
+ delete:
+ path: /v3beta1/projects/{projectId}/regions/{region}/instances/{instanceId}/users/{userId}
+ method: DELETE
+
+data_sources:
+ user:
+ read:
+ path: /v3beta1/projects/{projectId}/regions/{region}/instances/{instanceId}/users
+ method: GET
diff --git a/service_specs/sqlserverflex/beta/version_config.yml.bak b/service_specs/sqlserverflex/beta/version_config.yml.bak
new file mode 100644
index 00000000..70d79676
--- /dev/null
+++ b/service_specs/sqlserverflex/beta/version_config.yml.bak
@@ -0,0 +1,9 @@
+
+provider:
+ name: stackitprivatepreview
+
+data_sources:
+ version:
+ read:
+ path: /v3beta1/projects/{projectId}/regions/{region}/versions
+ method: GET
diff --git a/stackit/internal/services/sqlserverflexalpha/instance/datasource.go b/stackit/internal/services/sqlserverflexalpha/instance/datasource.go
index 9765d99a..3f8f787e 100644
--- a/stackit/internal/services/sqlserverflexalpha/instance/datasource.go
+++ b/stackit/internal/services/sqlserverflexalpha/instance/datasource.go
@@ -1,6 +1,6 @@
// Copyright (c) STACKIT
-package sqlserverflex
+package sqlserverflexalpha
import (
"context"
@@ -218,6 +218,7 @@ func (r *instanceDataSource) Schema(ctx context.Context, _ datasource.SchemaRequ
// Read refreshes the Terraform state with the latest data.
func (r *instanceDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { // nolint:gocritic // function signature required by Terraform
+ //var model sqlserverflexalpha2.InstanceModel
var model sqlserverflexalpha2.InstanceModel
diags := req.Config.Get(ctx, &model)
resp.Diagnostics.Append(diags...)
diff --git a/stackit/internal/services/sqlserverflexalpha/instance/functions.go b/stackit/internal/services/sqlserverflexalpha/instance/functions.go
index ee75cd21..783d95e1 100644
--- a/stackit/internal/services/sqlserverflexalpha/instance/functions.go
+++ b/stackit/internal/services/sqlserverflexalpha/instance/functions.go
@@ -1,4 +1,4 @@
-package sqlserverflex
+package sqlserverflexalpha
import (
"context"
@@ -113,152 +113,6 @@ func handleEncryption(
return enc
}
-//func mapFields(
-// ctx context.Context,
-// resp *sqlserverflex.GetInstanceResponse,
-// model *Model,
-// storage *storageModel,
-// encryption *encryptionModel,
-// network *networkModel,
-// region string,
-//) error {
-// if resp == nil {
-// return fmt.Errorf("response input is nil")
-// }
-// if model == nil {
-// return fmt.Errorf("model input is nil")
-// }
-// instance := resp
-//
-// var instanceId string
-// if model.InstanceId.ValueString() != "" {
-// instanceId = model.InstanceId.ValueString()
-// } else if instance.Id != nil {
-// instanceId = *instance.Id
-// } else {
-// return fmt.Errorf("instance id not present")
-// }
-//
-// var storageValues map[string]attr.Value
-// if instance.Storage == nil {
-// storageValues = map[string]attr.Value{
-// "class": storage.Class,
-// "size": storage.Size,
-// }
-// } else {
-// storageValues = map[string]attr.Value{
-// "class": types.StringValue(*instance.Storage.Class),
-// "size": types.Int64PointerValue(instance.Storage.Size),
-// }
-// }
-// storageObject, diags := types.ObjectValue(storageTypes, storageValues)
-// if diags.HasError() {
-// return fmt.Errorf("creating storage: %w", core.DiagsToError(diags))
-// }
-//
-// var encryptionValues map[string]attr.Value
-// if instance.Encryption == nil {
-// encryptionValues = map[string]attr.Value{
-// "keyring_id": encryption.KeyRingId,
-// "key_id": encryption.KeyId,
-// "key_version": encryption.KeyVersion,
-// "service_account": encryption.ServiceAccount,
-// }
-// } else {
-// encryptionValues = map[string]attr.Value{
-// "keyring_id": types.StringValue(*instance.Encryption.KekKeyRingId),
-// "key_id": types.StringValue(*instance.Encryption.KekKeyId),
-// "key_version": types.StringValue(*instance.Encryption.KekKeyVersion),
-// "service_account": types.StringValue(*instance.Encryption.ServiceAccount),
-// }
-// }
-// encryptionObject, diags := types.ObjectValue(encryptionTypes, encryptionValues)
-// if diags.HasError() {
-// return fmt.Errorf("creating encryption: %w", core.DiagsToError(diags))
-// }
-//
-// var networkValues map[string]attr.Value
-// if instance.Network == nil {
-// networkValues = map[string]attr.Value{
-// "acl": network.ACL,
-// "access_scope": network.AccessScope,
-// "instance_address": network.InstanceAddress,
-// "router_address": network.RouterAddress,
-// }
-// } else {
-// aclList, diags := types.ListValueFrom(ctx, types.StringType, *instance.Network.Acl)
-// if diags.HasError() {
-// return fmt.Errorf("creating network (acl list): %w", core.DiagsToError(diags))
-// }
-//
-// var routerAddress string
-// if instance.Network.RouterAddress != nil {
-// routerAddress = *instance.Network.RouterAddress
-// diags.AddWarning("field missing while mapping fields", "router_address was empty in API response")
-// }
-// if instance.Network.InstanceAddress == nil {
-// return fmt.Errorf("creating network: no instance address returned")
-// }
-// networkValues = map[string]attr.Value{
-// "acl": aclList,
-// "access_scope": types.StringValue(string(*instance.Network.AccessScope)),
-// "instance_address": types.StringValue(*instance.Network.InstanceAddress),
-// "router_address": types.StringValue(routerAddress),
-// }
-// }
-// networkObject, diags := types.ObjectValue(networkTypes, networkValues)
-// if diags.HasError() {
-// return fmt.Errorf("creating network: %w", core.DiagsToError(diags))
-// }
-//
-// simplifiedModelBackupSchedule := utils.SimplifyBackupSchedule(model.BackupSchedule.ValueString())
-// // If the value returned by the API is different from the one in the model after simplification,
-// // we update the model so that it causes an error in Terraform
-// if simplifiedModelBackupSchedule != types.StringPointerValue(instance.BackupSchedule).ValueString() {
-// model.BackupSchedule = types.StringPointerValue(instance.BackupSchedule)
-// }
-//
-// if instance.Replicas == nil {
-// return fmt.Errorf("instance has no replicas set")
-// }
-//
-// if instance.RetentionDays == nil {
-// return fmt.Errorf("instance has no retention days set")
-// }
-//
-// if instance.Version == nil {
-// return fmt.Errorf("instance has no version set")
-// }
-//
-// if instance.Edition == nil {
-// return fmt.Errorf("instance has no edition set")
-// }
-//
-// if instance.Status == nil {
-// return fmt.Errorf("instance has no status set")
-// }
-//
-// if instance.IsDeletable == nil {
-// return fmt.Errorf("instance has no IsDeletable set")
-// }
-//
-// model.Id = utils.BuildInternalTerraformId(model.ProjectId.ValueString(), region, instanceId)
-// model.InstanceId = types.StringValue(instanceId)
-// model.Name = types.StringPointerValue(instance.Name)
-// model.FlavorId = types.StringPointerValue(instance.FlavorId)
-// model.Replicas = types.Int64Value(int64(*instance.Replicas))
-// model.Storage = storageObject
-// model.Version = types.StringValue(string(*instance.Version))
-// model.Edition = types.StringValue(string(*instance.Edition))
-// model.Region = types.StringValue(region)
-// model.Encryption = encryptionObject
-// model.Network = networkObject
-// model.RetentionDays = types.Int64Value(*instance.RetentionDays)
-// model.Status = types.StringValue(string(*instance.Status))
-// model.IsDeletable = types.BoolValue(*instance.IsDeletable)
-// return nil
-//}
-
func toCreatePayload(
ctx context.Context,
model *sqlserverflexResGen.InstanceModel,
@@ -313,52 +167,6 @@ func toCreatePayload(
}, nil
}
-////nolint:unused // TODO: remove if not needed later
-//func toUpdatePartiallyPayload(
-// model *Model,
-// storage *storageModel,
-// network *networkModel,
-//) (*sqlserverflex.UpdateInstancePartiallyRequestPayload, error) {
-// if model == nil {
-// return nil, fmt.Errorf("nil model")
-// }
-//
-// storagePayload := &sqlserverflex.UpdateInstanceRequestPayloadGetStorageArgType{}
-// if storage != nil {
-// storagePayload.Size = conversion.Int64ValueToPointer(storage.Size)
-// }
-//
-// var aclElements []string
-// if network != nil && !network.ACL.IsNull() && !network.ACL.IsUnknown() {
-// aclElements = make([]string, 0, len(network.ACL.Elements()))
-// diags := network.ACL.ElementsAs(context.TODO(), &aclElements, false)
-// if diags.HasError() {
-// return nil, fmt.Errorf("creating network: %w", core.DiagsToError(diags))
-// }
-// }
-//
-// networkPayload := &sqlserverflex.UpdateInstancePartiallyRequestPayloadGetNetworkArgType{}
-// if network != nil {
-// networkPayload.AccessScope = sqlserverflex.UpdateInstancePartiallyRequestPayloadNetworkGetAccessScopeAttributeType(conversion.StringValueToPointer(network.AccessScope))
-// networkPayload.Acl = &aclElements
-// }
-//
-// if model.Replicas.ValueInt64() > math.MaxInt32 {
-// return nil, fmt.Errorf("replica count too big: %d", model.Replicas.ValueInt64())
-// }
-// replCount := int32(model.Replicas.ValueInt64()) // nolint:gosec // check is performed above
-// return &sqlserverflex.UpdateInstancePartiallyRequestPayload{
-// BackupSchedule: conversion.StringValueToPointer(model.BackupSchedule),
-// FlavorId: conversion.StringValueToPointer(model.FlavorId),
-// Name: conversion.StringValueToPointer(model.Name),
-// Network: networkPayload,
-// Replicas: sqlserverflex.UpdateInstancePartiallyRequestPayloadGetReplicasAttributeType(&replCount),
-// RetentionDays: conversion.Int64ValueToPointer(model.RetentionDays),
-// Storage: storagePayload,
-// Version: sqlserverflex.UpdateInstancePartiallyRequestPayloadGetVersionAttributeType(conversion.StringValueToPointer(model.Version)),
-// }, nil
-//}
-
// TODO: check func with his args
func toUpdatePayload(
ctx context.Context,
@@ -380,9 +188,8 @@ func toUpdatePayload(
BackupSchedule: m.BackupSchedule.ValueStringPointer(),
FlavorId: m.FlavorId.ValueStringPointer(),
Name: m.Name.ValueStringPointer(),
- Network: &sqlserverflex.CreateInstanceRequestPayloadNetwork{
- AccessScope: sqlserverflex.CreateInstanceRequestPayloadNetworkGetAccessScopeAttributeType(m.Network.AccessScope.ValueStringPointer()),
- Acl: &netAcl,
+ Network: &sqlserverflex.UpdateInstanceRequestPayloadNetwork{
+ Acl: &netAcl,
},
Replicas: &replVal,
RetentionDays: m.RetentionDays.ValueInt64Pointer(),
diff --git a/stackit/internal/services/sqlserverflexalpha/instance/resource.go b/stackit/internal/services/sqlserverflexalpha/instance/resource.go
index 66436cac..9257c8df 100644
--- a/stackit/internal/services/sqlserverflexalpha/instance/resource.go
+++ b/stackit/internal/services/sqlserverflexalpha/instance/resource.go
@@ -1,6 +1,6 @@
// Copyright (c) STACKIT
-package sqlserverflex
+package sqlserverflexalpha
import (
"context"
@@ -592,7 +592,6 @@ func (r *instanceResource) Read(
ctx = core.LogResponse(ctx)
// Map response body to schema
- // err = mapFields(ctx, instanceResp, &model, storage, encryption, network, region)
err = mapResponseToModel(ctx, instanceResp, &model, resp.Diagnostics)
if err != nil {
core.LogAndAddError(
@@ -755,6 +754,7 @@ func (r *instanceResource) ImportState(
req resource.ImportStateRequest,
resp *resource.ImportStateResponse,
) {
+ // TODO
idParts := strings.Split(req.ID, core.Separator)
if len(idParts) != 3 || idParts[0] == "" || idParts[1] == "" || idParts[2] == "" {
diff --git a/stackit/internal/services/sqlserverflexalpha/instance/resource_test.go b/stackit/internal/services/sqlserverflexalpha/instance/resource_test.go
index 7768f1e9..175711ff 100644
--- a/stackit/internal/services/sqlserverflexalpha/instance/resource_test.go
+++ b/stackit/internal/services/sqlserverflexalpha/instance/resource_test.go
@@ -1,18 +1,4 @@
-// Copyright (c) STACKIT
-
-package sqlserverflex
-
-import (
- "context"
- "testing"
-
- "github.com/google/go-cmp/cmp"
- "github.com/hashicorp/terraform-plugin-framework/attr"
- "github.com/hashicorp/terraform-plugin-framework/types"
- "github.com/hashicorp/terraform-plugin-framework/types/basetypes"
- "github.com/stackitcloud/stackit-sdk-go/core/utils"
- sqlserverflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha"
-)
+package sqlserverflexalpha
// type sqlserverflexClientMocked struct {
// returnError bool
@@ -27,343 +13,343 @@ import (
// return c.listFlavorsResp, nil
// }
-func TestMapFields(t *testing.T) {
- t.Skip("Skipping - needs refactoring")
- const testRegion = "region"
- tests := []struct {
- description string
- state Model
- input *sqlserverflex.GetInstanceResponse
- storage *storageModel
- encryption *encryptionModel
- network *networkModel
- region string
- expected Model
- isValid bool
- }{
- {
- "default_values",
- Model{
- InstanceId: types.StringValue("iid"),
- ProjectId: types.StringValue("pid"),
- Replicas: types.Int64Value(1),
- RetentionDays: types.Int64Value(1),
- Version: types.StringValue("v1"),
- Edition: types.StringValue("edition 1"),
- Status: types.StringValue("status"),
- IsDeletable: types.BoolValue(true),
- },
- &sqlserverflex.GetInstanceResponse{
- FlavorId: utils.Ptr("flavor_id"),
- Replicas: sqlserverflex.GetInstanceResponseGetReplicasAttributeType(utils.Ptr(int32(1))),
- RetentionDays: utils.Ptr(int64(1)),
- Version: sqlserverflex.GetInstanceResponseGetVersionAttributeType(utils.Ptr("v1")),
- Edition: sqlserverflex.GetInstanceResponseGetEditionAttributeType(utils.Ptr("edition 1")),
- Status: sqlserverflex.GetInstanceResponseGetStatusAttributeType(utils.Ptr("status")),
- IsDeletable: utils.Ptr(true),
- },
- &storageModel{},
- &encryptionModel{},
- &networkModel{
- ACL: types.ListNull(basetypes.StringType{}),
- },
- testRegion,
- Model{
- Id: types.StringValue("pid,region,iid"),
- InstanceId: types.StringValue("iid"),
- ProjectId: types.StringValue("pid"),
- Name: types.StringNull(),
- BackupSchedule: types.StringNull(),
- Replicas: types.Int64Value(1),
- Storage: types.ObjectValueMust(storageTypes, map[string]attr.Value{
- "class": types.StringNull(),
- "size": types.Int64Null(),
- }),
- Encryption: types.ObjectValueMust(encryptionTypes, map[string]attr.Value{
- "keyring_id": types.StringNull(),
- "key_id": types.StringNull(),
- "key_version": types.StringNull(),
- "service_account": types.StringNull(),
- }),
- Network: types.ObjectValueMust(networkTypes, map[string]attr.Value{
- "acl": types.ListNull(types.StringType),
- "access_scope": types.StringNull(),
- "instance_address": types.StringNull(),
- "router_address": types.StringNull(),
- }),
- IsDeletable: types.BoolValue(true),
- Edition: types.StringValue("edition 1"),
- Status: types.StringValue("status"),
- RetentionDays: types.Int64Value(1),
- Version: types.StringValue("v1"),
- Region: types.StringValue(testRegion),
- },
- true,
- },
- {
- "simple_values",
- Model{
- InstanceId: types.StringValue("iid"),
- ProjectId: types.StringValue("pid"),
- },
- &sqlserverflex.GetInstanceResponse{
- BackupSchedule: utils.Ptr("schedule"),
- FlavorId: utils.Ptr("flavor_id"),
- Id: utils.Ptr("iid"),
- Name: utils.Ptr("name"),
- Replicas: sqlserverflex.GetInstanceResponseGetReplicasAttributeType(utils.Ptr(int32(56))),
- Status: sqlserverflex.GetInstanceResponseGetStatusAttributeType(utils.Ptr("status")),
- Storage: &sqlserverflex.Storage{
- Class: utils.Ptr("class"),
- Size: utils.Ptr(int64(78)),
- },
- Edition: sqlserverflex.GetInstanceResponseGetEditionAttributeType(utils.Ptr("edition")),
- RetentionDays: utils.Ptr(int64(1)),
- Version: sqlserverflex.GetInstanceResponseGetVersionAttributeType(utils.Ptr("version")),
- IsDeletable: utils.Ptr(true),
- Encryption: nil,
- Network: &sqlserverflex.InstanceNetwork{
- AccessScope: nil,
- Acl: &[]string{
- "ip1",
- "ip2",
- "",
- },
- InstanceAddress: nil,
- RouterAddress: nil,
- },
- },
- &storageModel{},
- &encryptionModel{},
- &networkModel{
- ACL: types.ListValueMust(basetypes.StringType{}, []attr.Value{
- types.StringValue("ip1"),
- types.StringValue("ip2"),
- types.StringValue(""),
- }),
- },
- testRegion,
- Model{
- Id: types.StringValue("pid,region,iid"),
- InstanceId: types.StringValue("iid"),
- ProjectId: types.StringValue("pid"),
- Name: types.StringValue("name"),
- BackupSchedule: types.StringValue("schedule"),
- Replicas: types.Int64Value(56),
- Storage: types.ObjectValueMust(storageTypes, map[string]attr.Value{
- "class": types.StringValue("class"),
- "size": types.Int64Value(78),
- }),
- Network: types.ObjectValueMust(networkTypes, map[string]attr.Value{
- "acl": types.ListValueMust(types.StringType, []attr.Value{
- types.StringValue("ip1"),
- types.StringValue("ip2"),
- types.StringValue(""),
- }),
- "access_scope": types.StringNull(),
- "instance_address": types.StringNull(),
- "router_address": types.StringNull(),
- }),
- Edition: types.StringValue("edition"),
- RetentionDays: types.Int64Value(1),
- Version: types.StringValue("version"),
- Region: types.StringValue(testRegion),
- IsDeletable: types.BoolValue(true),
- Encryption: types.ObjectValueMust(encryptionTypes, map[string]attr.Value{
- "keyring_id": types.StringNull(),
- "key_id": types.StringNull(),
- "key_version": types.StringNull(),
- "service_account": types.StringNull(),
- }),
- Status: types.StringValue("status"),
- },
- true,
- },
- // {
- // "simple_values_no_flavor_and_storage",
- // Model{
- // InstanceId: types.StringValue("iid"),
- // ProjectId: types.StringValue("pid"),
- // },
- // &sqlserverflex.GetInstanceResponse{
- // Acl: &[]string{
- // "ip1",
- // "ip2",
- // "",
- // },
- // BackupSchedule: utils.Ptr("schedule"),
- // FlavorId: nil,
- // Id: utils.Ptr("iid"),
- // Name: utils.Ptr("name"),
- // Replicas: sqlserverflex.GetInstanceResponseGetReplicasAttributeType(utils.Ptr(int32(56))),
- // Status: sqlserverflex.GetInstanceResponseGetStatusAttributeType(utils.Ptr("status")),
- // Storage: nil,
- // Edition: sqlserverflex.GetInstanceResponseGetEditionAttributeType(utils.Ptr("edition")),
- // RetentionDays: utils.Ptr(int64(1)),
- // Version: sqlserverflex.GetInstanceResponseGetVersionAttributeType(utils.Ptr("version")),
- // },
- // &flavorModel{
- // CPU: types.Int64Value(12),
- // RAM: types.Int64Value(34),
- // },
- // &storageModel{
- // Class: types.StringValue("class"),
- // Size: types.Int64Value(78),
- // },
- // &optionsModel{
- // Edition: types.StringValue("edition"),
- // RetentionDays: types.Int64Value(1),
- // },
- // testRegion,
- // Model{
- // Id: types.StringValue("pid,region,iid"),
- // InstanceId: types.StringValue("iid"),
- // ProjectId: types.StringValue("pid"),
- // Name: types.StringValue("name"),
- // ACL: types.ListValueMust(types.StringType, []attr.Value{
- // types.StringValue("ip1"),
- // types.StringValue("ip2"),
- // types.StringValue(""),
- // }),
- // BackupSchedule: types.StringValue("schedule"),
- // Flavor: types.ObjectValueMust(flavorTypes, map[string]attr.Value{
- // "id": types.StringNull(),
- // "description": types.StringNull(),
- // "cpu": types.Int64Value(12),
- // "ram": types.Int64Value(34),
- // }),
- // Replicas: types.Int64Value(56),
- // Storage: types.ObjectValueMust(storageTypes, map[string]attr.Value{
- // "class": types.StringValue("class"),
- // "size": types.Int64Value(78),
- // }),
- // Options: types.ObjectValueMust(optionsTypes, map[string]attr.Value{
- // "edition": types.StringValue("edition"),
- // "retention_days": types.Int64Value(1),
- // }),
- // Version: types.StringValue("version"),
- // Region: types.StringValue(testRegion),
- // },
- // true,
- // },
- // {
- // "acls_unordered",
- // Model{
- // InstanceId: types.StringValue("iid"),
- // ProjectId: types.StringValue("pid"),
- // ACL: types.ListValueMust(types.StringType, []attr.Value{
- // types.StringValue("ip2"),
- // types.StringValue(""),
- // types.StringValue("ip1"),
- // }),
- // },
- // &sqlserverflex.GetInstanceResponse{
- // Acl: &[]string{
- // "",
- // "ip1",
- // "ip2",
- // },
- // BackupSchedule: utils.Ptr("schedule"),
- // FlavorId: nil,
- // Id: utils.Ptr("iid"),
- // Name: utils.Ptr("name"),
- // Replicas: sqlserverflex.GetInstanceResponseGetReplicasAttributeType(utils.Ptr(int32(56))),
- // Status: sqlserverflex.GetInstanceResponseGetStatusAttributeType(utils.Ptr("status")),
- // Storage: nil,
- // //Options: &map[string]string{
- // // "edition": "edition",
- // // "retentionDays": "1",
- // //},
- // Version: sqlserverflex.GetInstanceResponseGetVersionAttributeType(utils.Ptr("version")),
- // },
- // &flavorModel{
- // CPU: types.Int64Value(12),
- // RAM: types.Int64Value(34),
- // },
- // &storageModel{
- // Class: types.StringValue("class"),
- // Size: types.Int64Value(78),
- // },
- // &optionsModel{},
- // testRegion,
- // Model{
- // Id: types.StringValue("pid,region,iid"),
- // InstanceId: types.StringValue("iid"),
- // ProjectId: types.StringValue("pid"),
- // Name: types.StringValue("name"),
- // ACL: types.ListValueMust(types.StringType, []attr.Value{
- // types.StringValue("ip2"),
- // types.StringValue(""),
- // types.StringValue("ip1"),
- // }),
- // BackupSchedule: types.StringValue("schedule"),
- // Flavor: types.ObjectValueMust(flavorTypes, map[string]attr.Value{
- // "id": types.StringNull(),
- // "description": types.StringNull(),
- // "cpu": types.Int64Value(12),
- // "ram": types.Int64Value(34),
- // }),
- // Replicas: types.Int64Value(56),
- // Storage: types.ObjectValueMust(storageTypes, map[string]attr.Value{
- // "class": types.StringValue("class"),
- // "size": types.Int64Value(78),
- // }),
- // Options: types.ObjectValueMust(optionsTypes, map[string]attr.Value{
- // "edition": types.StringValue("edition"),
- // "retention_days": types.Int64Value(1),
- // }),
- // Version: types.StringValue("version"),
- // Region: types.StringValue(testRegion),
- // },
- // true,
- // },
- // {
- // "nil_response",
- // Model{
- // InstanceId: types.StringValue("iid"),
- // ProjectId: types.StringValue("pid"),
- // },
- // nil,
- // &flavorModel{},
- // &storageModel{},
- // &optionsModel{},
- // testRegion,
- // Model{},
- // false,
- // },
- // {
- // "no_resource_id",
- // Model{
- // InstanceId: types.StringValue("iid"),
- // ProjectId: types.StringValue("pid"),
- // },
- // &sqlserverflex.GetInstanceResponse{},
- // &flavorModel{},
- // &storageModel{},
- // &optionsModel{},
- // testRegion,
- // Model{},
- // false,
- // },
- }
- for _, tt := range tests {
- t.Run(tt.description, func(t *testing.T) {
- err := mapFields(context.Background(), tt.input, &tt.state, tt.storage, tt.encryption, tt.network, tt.region)
- if !tt.isValid && err == nil {
- t.Fatalf("Should have failed")
- }
- if tt.isValid && err != nil {
- t.Fatalf("Should not have failed: %v", err)
- }
- if tt.isValid {
- diff := cmp.Diff(tt.state, tt.expected)
- if diff != "" {
- t.Fatalf("Data does not match: %s", diff)
- }
- }
- })
- }
-}
+//func TestMapFields(t *testing.T) {
+// t.Skip("Skipping - needs refactoring")
+// const testRegion = "region"
+// tests := []struct {
+// description string
+// state Model
+// input *sqlserverflex.GetInstanceResponse
+// storage *storageModel
+// encryption *encryptionModel
+// network *networkModel
+// region string
+// expected Model
+// isValid bool
+// }{
+// {
+// "default_values",
+// Model{
+// InstanceId: types.StringValue("iid"),
+// ProjectId: types.StringValue("pid"),
+// Replicas: types.Int64Value(1),
+// RetentionDays: types.Int64Value(1),
+// Version: types.StringValue("v1"),
+// Edition: types.StringValue("edition 1"),
+// Status: types.StringValue("status"),
+// IsDeletable: types.BoolValue(true),
+// },
+// &sqlserverflex.GetInstanceResponse{
+// FlavorId: utils.Ptr("flavor_id"),
+// Replicas: sqlserverflex.GetInstanceResponseGetReplicasAttributeType(utils.Ptr(int32(1))),
+// RetentionDays: utils.Ptr(int64(1)),
+// Version: sqlserverflex.GetInstanceResponseGetVersionAttributeType(utils.Ptr("v1")),
+// Edition: sqlserverflex.GetInstanceResponseGetEditionAttributeType(utils.Ptr("edition 1")),
+// Status: sqlserverflex.GetInstanceResponseGetStatusAttributeType(utils.Ptr("status")),
+// IsDeletable: utils.Ptr(true),
+// },
+// &storageModel{},
+// &encryptionModel{},
+// &networkModel{
+// ACL: types.ListNull(basetypes.StringType{}),
+// },
+// testRegion,
+// Model{
+// Id: types.StringValue("pid,region,iid"),
+// InstanceId: types.StringValue("iid"),
+// ProjectId: types.StringValue("pid"),
+// Name: types.StringNull(),
+// BackupSchedule: types.StringNull(),
+// Replicas: types.Int64Value(1),
+// Storage: types.ObjectValueMust(storageTypes, map[string]attr.Value{
+// "class": types.StringNull(),
+// "size": types.Int64Null(),
+// }),
+// Encryption: types.ObjectValueMust(encryptionTypes, map[string]attr.Value{
+// "keyring_id": types.StringNull(),
+// "key_id": types.StringNull(),
+// "key_version": types.StringNull(),
+// "service_account": types.StringNull(),
+// }),
+// Network: types.ObjectValueMust(networkTypes, map[string]attr.Value{
+// "acl": types.ListNull(types.StringType),
+// "access_scope": types.StringNull(),
+// "instance_address": types.StringNull(),
+// "router_address": types.StringNull(),
+// }),
+// IsDeletable: types.BoolValue(true),
+// Edition: types.StringValue("edition 1"),
+// Status: types.StringValue("status"),
+// RetentionDays: types.Int64Value(1),
+// Version: types.StringValue("v1"),
+// Region: types.StringValue(testRegion),
+// },
+// true,
+// },
+// {
+// "simple_values",
+// Model{
+// InstanceId: types.StringValue("iid"),
+// ProjectId: types.StringValue("pid"),
+// },
+// &sqlserverflex.GetInstanceResponse{
+// BackupSchedule: utils.Ptr("schedule"),
+// FlavorId: utils.Ptr("flavor_id"),
+// Id: utils.Ptr("iid"),
+// Name: utils.Ptr("name"),
+// Replicas: sqlserverflex.GetInstanceResponseGetReplicasAttributeType(utils.Ptr(int32(56))),
+// Status: sqlserverflex.GetInstanceResponseGetStatusAttributeType(utils.Ptr("status")),
+// Storage: &sqlserverflex.Storage{
+// Class: utils.Ptr("class"),
+// Size: utils.Ptr(int64(78)),
+// },
+// Edition: sqlserverflex.GetInstanceResponseGetEditionAttributeType(utils.Ptr("edition")),
+// RetentionDays: utils.Ptr(int64(1)),
+// Version: sqlserverflex.GetInstanceResponseGetVersionAttributeType(utils.Ptr("version")),
+// IsDeletable: utils.Ptr(true),
+// Encryption: nil,
+// Network: &sqlserverflex.InstanceNetwork{
+// AccessScope: nil,
+// Acl: &[]string{
+// "ip1",
+// "ip2",
+// "",
+// },
+// InstanceAddress: nil,
+// RouterAddress: nil,
+// },
+// },
+// &storageModel{},
+// &encryptionModel{},
+// &networkModel{
+// ACL: types.ListValueMust(basetypes.StringType{}, []attr.Value{
+// types.StringValue("ip1"),
+// types.StringValue("ip2"),
+// types.StringValue(""),
+// }),
+// },
+// testRegion,
+// Model{
+// Id: types.StringValue("pid,region,iid"),
+// InstanceId: types.StringValue("iid"),
+// ProjectId: types.StringValue("pid"),
+// Name: types.StringValue("name"),
+// BackupSchedule: types.StringValue("schedule"),
+// Replicas: types.Int64Value(56),
+// Storage: types.ObjectValueMust(storageTypes, map[string]attr.Value{
+// "class": types.StringValue("class"),
+// "size": types.Int64Value(78),
+// }),
+// Network: types.ObjectValueMust(networkTypes, map[string]attr.Value{
+// "acl": types.ListValueMust(types.StringType, []attr.Value{
+// types.StringValue("ip1"),
+// types.StringValue("ip2"),
+// types.StringValue(""),
+// }),
+// "access_scope": types.StringNull(),
+// "instance_address": types.StringNull(),
+// "router_address": types.StringNull(),
+// }),
+// Edition: types.StringValue("edition"),
+// RetentionDays: types.Int64Value(1),
+// Version: types.StringValue("version"),
+// Region: types.StringValue(testRegion),
+// IsDeletable: types.BoolValue(true),
+// Encryption: types.ObjectValueMust(encryptionTypes, map[string]attr.Value{
+// "keyring_id": types.StringNull(),
+// "key_id": types.StringNull(),
+// "key_version": types.StringNull(),
+// "service_account": types.StringNull(),
+// }),
+// Status: types.StringValue("status"),
+// },
+// true,
+// },
+// // {
+// // "simple_values_no_flavor_and_storage",
+// // Model{
+// // InstanceId: types.StringValue("iid"),
+// // ProjectId: types.StringValue("pid"),
+// // },
+// // &sqlserverflex.GetInstanceResponse{
+// // Acl: &[]string{
+// // "ip1",
+// // "ip2",
+// // "",
+// // },
+// // BackupSchedule: utils.Ptr("schedule"),
+// // FlavorId: nil,
+// // Id: utils.Ptr("iid"),
+// // Name: utils.Ptr("name"),
+// // Replicas: sqlserverflex.GetInstanceResponseGetReplicasAttributeType(utils.Ptr(int32(56))),
+// // Status: sqlserverflex.GetInstanceResponseGetStatusAttributeType(utils.Ptr("status")),
+// // Storage: nil,
+// // Edition: sqlserverflex.GetInstanceResponseGetEditionAttributeType(utils.Ptr("edition")),
+// // RetentionDays: utils.Ptr(int64(1)),
+// // Version: sqlserverflex.GetInstanceResponseGetVersionAttributeType(utils.Ptr("version")),
+// // },
+// // &flavorModel{
+// // CPU: types.Int64Value(12),
+// // RAM: types.Int64Value(34),
+// // },
+// // &storageModel{
+// // Class: types.StringValue("class"),
+// // Size: types.Int64Value(78),
+// // },
+// // &optionsModel{
+// // Edition: types.StringValue("edition"),
+// // RetentionDays: types.Int64Value(1),
+// // },
+// // testRegion,
+// // Model{
+// // Id: types.StringValue("pid,region,iid"),
+// // InstanceId: types.StringValue("iid"),
+// // ProjectId: types.StringValue("pid"),
+// // Name: types.StringValue("name"),
+// // ACL: types.ListValueMust(types.StringType, []attr.Value{
+// // types.StringValue("ip1"),
+// // types.StringValue("ip2"),
+// // types.StringValue(""),
+// // }),
+// // BackupSchedule: types.StringValue("schedule"),
+// // Flavor: types.ObjectValueMust(flavorTypes, map[string]attr.Value{
+// // "id": types.StringNull(),
+// // "description": types.StringNull(),
+// // "cpu": types.Int64Value(12),
+// // "ram": types.Int64Value(34),
+// // }),
+// // Replicas: types.Int64Value(56),
+// // Storage: types.ObjectValueMust(storageTypes, map[string]attr.Value{
+// // "class": types.StringValue("class"),
+// // "size": types.Int64Value(78),
+// // }),
+// // Options: types.ObjectValueMust(optionsTypes, map[string]attr.Value{
+// // "edition": types.StringValue("edition"),
+// // "retention_days": types.Int64Value(1),
+// // }),
+// // Version: types.StringValue("version"),
+// // Region: types.StringValue(testRegion),
+// // },
+// // true,
+// // },
+// // {
+// // "acls_unordered",
+// // Model{
+// // InstanceId: types.StringValue("iid"),
+// // ProjectId: types.StringValue("pid"),
+// // ACL: types.ListValueMust(types.StringType, []attr.Value{
+// // types.StringValue("ip2"),
+// // types.StringValue(""),
+// // types.StringValue("ip1"),
+// // }),
+// // },
+// // &sqlserverflex.GetInstanceResponse{
+// // Acl: &[]string{
+// // "",
+// // "ip1",
+// // "ip2",
+// // },
+// // BackupSchedule: utils.Ptr("schedule"),
+// // FlavorId: nil,
+// // Id: utils.Ptr("iid"),
+// // Name: utils.Ptr("name"),
+// // Replicas: sqlserverflex.GetInstanceResponseGetReplicasAttributeType(utils.Ptr(int32(56))),
+// // Status: sqlserverflex.GetInstanceResponseGetStatusAttributeType(utils.Ptr("status")),
+// // Storage: nil,
+// // //Options: &map[string]string{
+// // // "edition": "edition",
+// // // "retentionDays": "1",
+// // //},
+// // Version: sqlserverflex.GetInstanceResponseGetVersionAttributeType(utils.Ptr("version")),
+// // },
+// // &flavorModel{
+// // CPU: types.Int64Value(12),
+// // RAM: types.Int64Value(34),
+// // },
+// // &storageModel{
+// // Class: types.StringValue("class"),
+// // Size: types.Int64Value(78),
+// // },
+// // &optionsModel{},
+// // testRegion,
+// // Model{
+// // Id: types.StringValue("pid,region,iid"),
+// // InstanceId: types.StringValue("iid"),
+// // ProjectId: types.StringValue("pid"),
+// // Name: types.StringValue("name"),
+// // ACL: types.ListValueMust(types.StringType, []attr.Value{
+// // types.StringValue("ip2"),
+// // types.StringValue(""),
+// // types.StringValue("ip1"),
+// // }),
+// // BackupSchedule: types.StringValue("schedule"),
+// // Flavor: types.ObjectValueMust(flavorTypes, map[string]attr.Value{
+// // "id": types.StringNull(),
+// // "description": types.StringNull(),
+// // "cpu": types.Int64Value(12),
+// // "ram": types.Int64Value(34),
+// // }),
+// // Replicas: types.Int64Value(56),
+// // Storage: types.ObjectValueMust(storageTypes, map[string]attr.Value{
+// // "class": types.StringValue("class"),
+// // "size": types.Int64Value(78),
+// // }),
+// // Options: types.ObjectValueMust(optionsTypes, map[string]attr.Value{
+// // "edition": types.StringValue("edition"),
+// // "retention_days": types.Int64Value(1),
+// // }),
+// // Version: types.StringValue("version"),
+// // Region: types.StringValue(testRegion),
+// // },
+// // true,
+// // },
+// // {
+// // "nil_response",
+// // Model{
+// // InstanceId: types.StringValue("iid"),
+// // ProjectId: types.StringValue("pid"),
+// // },
+// // nil,
+// // &flavorModel{},
+// // &storageModel{},
+// // &optionsModel{},
+// // testRegion,
+// // Model{},
+// // false,
+// // },
+// // {
+// // "no_resource_id",
+// // Model{
+// // InstanceId: types.StringValue("iid"),
+// // ProjectId: types.StringValue("pid"),
+// // },
+// // &sqlserverflex.GetInstanceResponse{},
+// // &flavorModel{},
+// // &storageModel{},
+// // &optionsModel{},
+// // testRegion,
+// // Model{},
+// // false,
+// // },
+// }
+// for _, tt := range tests {
+// t.Run(tt.description, func(t *testing.T) {
+// err := mapFields(context.Background(), tt.input, &tt.state, tt.storage, tt.encryption, tt.network, tt.region)
+// if !tt.isValid && err == nil {
+// t.Fatalf("Should have failed")
+// }
+// if tt.isValid && err != nil {
+// t.Fatalf("Should not have failed: %v", err)
+// }
+// if tt.isValid {
+// diff := cmp.Diff(tt.state, tt.expected)
+// if diff != "" {
+// t.Fatalf("Data does not match: %s", diff)
+// }
+// }
+// })
+// }
+//}
// func TestToCreatePayload(t *testing.T) {
// tests := []struct {
diff --git a/stackit/internal/services/sqlserverflexalpha/utils/util.go b/stackit/internal/services/sqlserverflexalpha/utils/util.go
index 4180955b..db031162 100644
--- a/stackit/internal/services/sqlserverflexalpha/utils/util.go
+++ b/stackit/internal/services/sqlserverflexalpha/utils/util.go
@@ -1,5 +1,3 @@
-// Copyright (c) STACKIT
-
package utils
import (
diff --git a/stackit/internal/services/sqlserverflexalpha/utils/util_test.go b/stackit/internal/services/sqlserverflexalpha/utils/util_test.go
index 7818408d..7afd6b1d 100644
--- a/stackit/internal/services/sqlserverflexalpha/utils/util_test.go
+++ b/stackit/internal/services/sqlserverflexalpha/utils/util_test.go
@@ -1,5 +1,3 @@
-// Copyright (c) STACKIT
-
package utils
import (
diff --git a/stackit/internal/services/sqlserverflexalpha/version/datasource.go b/stackit/internal/services/sqlserverflexalpha/version/datasource.go
deleted file mode 100644
index 707ba2f9..00000000
--- a/stackit/internal/services/sqlserverflexalpha/version/datasource.go
+++ /dev/null
@@ -1,71 +0,0 @@
-package sqlserverflexalpha
-
-import (
- "context"
-
- "github.com/hashicorp/terraform-plugin-framework/datasource"
- "github.com/hashicorp/terraform-plugin-log/tflog"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
- sqlserverflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/utils"
-
- sqlserverflexalphaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/version/datasources_gen"
-)
-
-var (
- _ datasource.DataSource = (*versionDataSource)(nil)
- _ datasource.DataSourceWithConfigure = (*versionDataSource)(nil)
-)
-
-func NewVersionDataSource() datasource.DataSource {
- return &versionDataSource{}
-}
-
-type versionDataSource struct {
- client *sqlserverflexalpha.APIClient
- providerData core.ProviderData
-}
-
-func (d *versionDataSource) Metadata(_ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
- resp.TypeName = req.ProviderTypeName + "_sqlserverflexalpha_version"
-}
-
-func (d *versionDataSource) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) {
- resp.Schema = sqlserverflexalphaGen.VersionDataSourceSchema(ctx)
-}
-
-// Configure adds the provider configured client to the data source.
-func (d *versionDataSource) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) {
- var ok bool
- d.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics)
- if !ok {
- return
- }
-
- apiClient := sqlserverflexUtils.ConfigureClient(ctx, &d.providerData, &resp.Diagnostics)
- if resp.Diagnostics.HasError() {
- return
- }
- d.client = apiClient
- tflog.Info(ctx, "SQL SERVER Flex version client configured")
-}
-
-func (d *versionDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
- var data sqlserverflexalphaGen.VersionModel
-
- // Read Terraform configuration data into the model
- resp.Diagnostics.Append(req.Config.Get(ctx, &data)...)
-
- if resp.Diagnostics.HasError() {
- return
- }
-
- // Todo: Read API call logic
-
- // Example data value setting
- // data.Id = types.StringValue("example-id")
-
- // Save data into Terraform state
- resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
-}
diff --git a/stackit/internal/services/sqlserverflexalpha/version/datasources_gen/version_data_source_gen.go b/stackit/internal/services/sqlserverflexalpha/version/datasources_gen/version_data_source_gen.go
deleted file mode 100644
index cb9008f1..00000000
--- a/stackit/internal/services/sqlserverflexalpha/version/datasources_gen/version_data_source_gen.go
+++ /dev/null
@@ -1,569 +0,0 @@
-// Code generated by terraform-plugin-framework-generator DO NOT EDIT.
-
-package sqlserverflexalpha
-
-import (
- "context"
- "fmt"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
- "github.com/hashicorp/terraform-plugin-framework/attr"
- "github.com/hashicorp/terraform-plugin-framework/diag"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
- "github.com/hashicorp/terraform-plugin-framework/types"
- "github.com/hashicorp/terraform-plugin-framework/types/basetypes"
- "github.com/hashicorp/terraform-plugin-go/tftypes"
- "strings"
-
- "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
-)
-
-func VersionDataSourceSchema(ctx context.Context) schema.Schema {
- return schema.Schema{
- Attributes: map[string]schema.Attribute{
- "project_id": schema.StringAttribute{
- Required: true,
- Description: "The STACKIT project ID.",
- MarkdownDescription: "The STACKIT project ID.",
- },
- "region": schema.StringAttribute{
- Required: true,
- Description: "The region which should be addressed",
- MarkdownDescription: "The region which should be addressed",
- Validators: []validator.String{
- stringvalidator.OneOf(
- "eu01",
- ),
- },
- },
- "versions": schema.ListNestedAttribute{
- NestedObject: schema.NestedAttributeObject{
- Attributes: map[string]schema.Attribute{
- "beta": schema.BoolAttribute{
- Computed: true,
- Description: "Flag if the version is a beta version. If set the version may contain bugs and is not fully tested.",
- MarkdownDescription: "Flag if the version is a beta version. If set the version may contain bugs and is not fully tested.",
- },
- "deprecated": schema.StringAttribute{
- Computed: true,
- Description: "Timestamp in RFC3339 format which says when the version will no longer be supported by STACKIT.",
- MarkdownDescription: "Timestamp in RFC3339 format which says when the version will no longer be supported by STACKIT.",
- },
- "recommend": schema.BoolAttribute{
- Computed: true,
- Description: "Flag if the version is recommend by the STACKIT Team.",
- MarkdownDescription: "Flag if the version is recommend by the STACKIT Team.",
- },
- "version": schema.StringAttribute{
- Computed: true,
- Description: "The sqlserver version used for the instance.",
- MarkdownDescription: "The sqlserver version used for the instance.",
- },
- },
- CustomType: VersionsType{
- ObjectType: types.ObjectType{
- AttrTypes: VersionsValue{}.AttributeTypes(ctx),
- },
- },
- },
- Computed: true,
- Description: "A list containing available sqlserver versions.",
- MarkdownDescription: "A list containing available sqlserver versions.",
- },
- },
- }
-}
-
-type VersionModel struct {
- ProjectId types.String `tfsdk:"project_id"`
- Region types.String `tfsdk:"region"`
- Versions types.List `tfsdk:"versions"`
-}
-
-var _ basetypes.ObjectTypable = VersionsType{}
-
-type VersionsType struct {
- basetypes.ObjectType
-}
-
-func (t VersionsType) Equal(o attr.Type) bool {
- other, ok := o.(VersionsType)
-
- if !ok {
- return false
- }
-
- return t.ObjectType.Equal(other.ObjectType)
-}
-
-func (t VersionsType) String() string {
- return "VersionsType"
-}
-
-func (t VersionsType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- attributes := in.Attributes()
-
- betaAttribute, ok := attributes["beta"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `beta is missing from object`)
-
- return nil, diags
- }
-
- betaVal, ok := betaAttribute.(basetypes.BoolValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`beta expected to be basetypes.BoolValue, was: %T`, betaAttribute))
- }
-
- deprecatedAttribute, ok := attributes["deprecated"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `deprecated is missing from object`)
-
- return nil, diags
- }
-
- deprecatedVal, ok := deprecatedAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`deprecated expected to be basetypes.StringValue, was: %T`, deprecatedAttribute))
- }
-
- recommendAttribute, ok := attributes["recommend"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `recommend is missing from object`)
-
- return nil, diags
- }
-
- recommendVal, ok := recommendAttribute.(basetypes.BoolValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`recommend expected to be basetypes.BoolValue, was: %T`, recommendAttribute))
- }
-
- versionAttribute, ok := attributes["version"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `version is missing from object`)
-
- return nil, diags
- }
-
- versionVal, ok := versionAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`version expected to be basetypes.StringValue, was: %T`, versionAttribute))
- }
-
- if diags.HasError() {
- return nil, diags
- }
-
- return VersionsValue{
- Beta: betaVal,
- Deprecated: deprecatedVal,
- Recommend: recommendVal,
- Version: versionVal,
- state: attr.ValueStateKnown,
- }, diags
-}
-
-func NewVersionsValueNull() VersionsValue {
- return VersionsValue{
- state: attr.ValueStateNull,
- }
-}
-
-func NewVersionsValueUnknown() VersionsValue {
- return VersionsValue{
- state: attr.ValueStateUnknown,
- }
-}
-
-func NewVersionsValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (VersionsValue, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521
- ctx := context.Background()
-
- for name, attributeType := range attributeTypes {
- attribute, ok := attributes[name]
-
- if !ok {
- diags.AddError(
- "Missing VersionsValue Attribute Value",
- "While creating a VersionsValue value, a missing attribute value was detected. "+
- "A VersionsValue must contain values for all attributes, even if null or unknown. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("VersionsValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()),
- )
-
- continue
- }
-
- if !attributeType.Equal(attribute.Type(ctx)) {
- diags.AddError(
- "Invalid VersionsValue Attribute Type",
- "While creating a VersionsValue value, an invalid attribute value was detected. "+
- "A VersionsValue must use a matching attribute type for the value. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("VersionsValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+
- fmt.Sprintf("VersionsValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)),
- )
- }
- }
-
- for name := range attributes {
- _, ok := attributeTypes[name]
-
- if !ok {
- diags.AddError(
- "Extra VersionsValue Attribute Value",
- "While creating a VersionsValue value, an extra attribute value was detected. "+
- "A VersionsValue must not contain values beyond the expected attribute types. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("Extra VersionsValue Attribute Name: %s", name),
- )
- }
- }
-
- if diags.HasError() {
- return NewVersionsValueUnknown(), diags
- }
-
- betaAttribute, ok := attributes["beta"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `beta is missing from object`)
-
- return NewVersionsValueUnknown(), diags
- }
-
- betaVal, ok := betaAttribute.(basetypes.BoolValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`beta expected to be basetypes.BoolValue, was: %T`, betaAttribute))
- }
-
- deprecatedAttribute, ok := attributes["deprecated"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `deprecated is missing from object`)
-
- return NewVersionsValueUnknown(), diags
- }
-
- deprecatedVal, ok := deprecatedAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`deprecated expected to be basetypes.StringValue, was: %T`, deprecatedAttribute))
- }
-
- recommendAttribute, ok := attributes["recommend"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `recommend is missing from object`)
-
- return NewVersionsValueUnknown(), diags
- }
-
- recommendVal, ok := recommendAttribute.(basetypes.BoolValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`recommend expected to be basetypes.BoolValue, was: %T`, recommendAttribute))
- }
-
- versionAttribute, ok := attributes["version"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `version is missing from object`)
-
- return NewVersionsValueUnknown(), diags
- }
-
- versionVal, ok := versionAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`version expected to be basetypes.StringValue, was: %T`, versionAttribute))
- }
-
- if diags.HasError() {
- return NewVersionsValueUnknown(), diags
- }
-
- return VersionsValue{
- Beta: betaVal,
- Deprecated: deprecatedVal,
- Recommend: recommendVal,
- Version: versionVal,
- state: attr.ValueStateKnown,
- }, diags
-}
-
-func NewVersionsValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) VersionsValue {
- object, diags := NewVersionsValue(attributeTypes, attributes)
-
- if diags.HasError() {
- // This could potentially be added to the diag package.
- diagsStrings := make([]string, 0, len(diags))
-
- for _, diagnostic := range diags {
- diagsStrings = append(diagsStrings, fmt.Sprintf(
- "%s | %s | %s",
- diagnostic.Severity(),
- diagnostic.Summary(),
- diagnostic.Detail()))
- }
-
- panic("NewVersionsValueMust received error(s): " + strings.Join(diagsStrings, "\n"))
- }
-
- return object
-}
-
-func (t VersionsType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) {
- if in.Type() == nil {
- return NewVersionsValueNull(), nil
- }
-
- if !in.Type().Equal(t.TerraformType(ctx)) {
- return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type())
- }
-
- if !in.IsKnown() {
- return NewVersionsValueUnknown(), nil
- }
-
- if in.IsNull() {
- return NewVersionsValueNull(), nil
- }
-
- attributes := map[string]attr.Value{}
-
- val := map[string]tftypes.Value{}
-
- err := in.As(&val)
-
- if err != nil {
- return nil, err
- }
-
- for k, v := range val {
- a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v)
-
- if err != nil {
- return nil, err
- }
-
- attributes[k] = a
- }
-
- return NewVersionsValueMust(VersionsValue{}.AttributeTypes(ctx), attributes), nil
-}
-
-func (t VersionsType) ValueType(ctx context.Context) attr.Value {
- return VersionsValue{}
-}
-
-var _ basetypes.ObjectValuable = VersionsValue{}
-
-type VersionsValue struct {
- Beta basetypes.BoolValue `tfsdk:"beta"`
- Deprecated basetypes.StringValue `tfsdk:"deprecated"`
- Recommend basetypes.BoolValue `tfsdk:"recommend"`
- Version basetypes.StringValue `tfsdk:"version"`
- state attr.ValueState
-}
-
-func (v VersionsValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
- attrTypes := make(map[string]tftypes.Type, 4)
-
- var val tftypes.Value
- var err error
-
- attrTypes["beta"] = basetypes.BoolType{}.TerraformType(ctx)
- attrTypes["deprecated"] = basetypes.StringType{}.TerraformType(ctx)
- attrTypes["recommend"] = basetypes.BoolType{}.TerraformType(ctx)
- attrTypes["version"] = basetypes.StringType{}.TerraformType(ctx)
-
- objectType := tftypes.Object{AttributeTypes: attrTypes}
-
- switch v.state {
- case attr.ValueStateKnown:
- vals := make(map[string]tftypes.Value, 4)
-
- val, err = v.Beta.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["beta"] = val
-
- val, err = v.Deprecated.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["deprecated"] = val
-
- val, err = v.Recommend.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["recommend"] = val
-
- val, err = v.Version.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["version"] = val
-
- if err := tftypes.ValidateValue(objectType, vals); err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- return tftypes.NewValue(objectType, vals), nil
- case attr.ValueStateNull:
- return tftypes.NewValue(objectType, nil), nil
- case attr.ValueStateUnknown:
- return tftypes.NewValue(objectType, tftypes.UnknownValue), nil
- default:
- panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state))
- }
-}
-
-func (v VersionsValue) IsNull() bool {
- return v.state == attr.ValueStateNull
-}
-
-func (v VersionsValue) IsUnknown() bool {
- return v.state == attr.ValueStateUnknown
-}
-
-func (v VersionsValue) String() string {
- return "VersionsValue"
-}
-
-func (v VersionsValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- attributeTypes := map[string]attr.Type{
- "beta": basetypes.BoolType{},
- "deprecated": basetypes.StringType{},
- "recommend": basetypes.BoolType{},
- "version": basetypes.StringType{},
- }
-
- if v.IsNull() {
- return types.ObjectNull(attributeTypes), diags
- }
-
- if v.IsUnknown() {
- return types.ObjectUnknown(attributeTypes), diags
- }
-
- objVal, diags := types.ObjectValue(
- attributeTypes,
- map[string]attr.Value{
- "beta": v.Beta,
- "deprecated": v.Deprecated,
- "recommend": v.Recommend,
- "version": v.Version,
- })
-
- return objVal, diags
-}
-
-func (v VersionsValue) Equal(o attr.Value) bool {
- other, ok := o.(VersionsValue)
-
- if !ok {
- return false
- }
-
- if v.state != other.state {
- return false
- }
-
- if v.state != attr.ValueStateKnown {
- return true
- }
-
- if !v.Beta.Equal(other.Beta) {
- return false
- }
-
- if !v.Deprecated.Equal(other.Deprecated) {
- return false
- }
-
- if !v.Recommend.Equal(other.Recommend) {
- return false
- }
-
- if !v.Version.Equal(other.Version) {
- return false
- }
-
- return true
-}
-
-func (v VersionsValue) Type(ctx context.Context) attr.Type {
- return VersionsType{
- basetypes.ObjectType{
- AttrTypes: v.AttributeTypes(ctx),
- },
- }
-}
-
-func (v VersionsValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
- return map[string]attr.Type{
- "beta": basetypes.BoolType{},
- "deprecated": basetypes.StringType{},
- "recommend": basetypes.BoolType{},
- "version": basetypes.StringType{},
- }
-}
diff --git a/stackit/internal/services/sqlserverflexbeta/database/datasource.go b/stackit/internal/services/sqlserverflexbeta/database/datasource.go
new file mode 100644
index 00000000..70fbaca4
--- /dev/null
+++ b/stackit/internal/services/sqlserverflexbeta/database/datasource.go
@@ -0,0 +1,150 @@
+package sqlserverflexbeta
+
+import (
+ "context"
+ "fmt"
+ "net/http"
+
+ "github.com/hashicorp/terraform-plugin-framework/datasource"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+ "github.com/hashicorp/terraform-plugin-log/tflog"
+ "github.com/stackitcloud/stackit-sdk-go/core/config"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
+
+ sqlserverflexbetaPkg "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta"
+ sqlserverflexbetaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/database/datasources_gen"
+)
+
+var _ datasource.DataSource = (*databaseDataSource)(nil)
+
+const errorPrefix = "[Sqlserverflexbeta - Database]"
+
+func NewDatabaseDataSource() datasource.DataSource {
+ return &databaseDataSource{}
+}
+
+type databaseDataSource struct {
+ client *sqlserverflexbetaPkg.APIClient
+ providerData core.ProviderData
+}
+
+func (d *databaseDataSource) Metadata(_ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
+ resp.TypeName = req.ProviderTypeName + "_sqlserverflexbeta_database"
+}
+
+func (d *databaseDataSource) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) {
+ resp.Schema = sqlserverflexbetaGen.DatabaseDataSourceSchema(ctx)
+}
+
+// Configure adds the provider configured client to the data source.
+func (d *databaseDataSource) Configure(
+ ctx context.Context,
+ req datasource.ConfigureRequest,
+ resp *datasource.ConfigureResponse,
+) {
+ var ok bool
+ d.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics)
+ if !ok {
+ return
+ }
+
+ apiClientConfigOptions := []config.ConfigurationOption{
+ config.WithCustomAuth(d.providerData.RoundTripper),
+ utils.UserAgentConfigOption(d.providerData.Version),
+ }
+ if d.providerData.SQLServerFlexCustomEndpoint != "" {
+ apiClientConfigOptions = append(
+ apiClientConfigOptions,
+ config.WithEndpoint(d.providerData.SQLServerFlexCustomEndpoint),
+ )
+ } else {
+ apiClientConfigOptions = append(
+ apiClientConfigOptions,
+ config.WithRegion(d.providerData.GetRegion()),
+ )
+ }
+ apiClient, err := sqlserverflexbetaPkg.NewAPIClient(apiClientConfigOptions...)
+ if err != nil {
+ resp.Diagnostics.AddError(
+ "Error configuring API client",
+ fmt.Sprintf(
+ "Configuring client: %v. This is an error related to the provider configuration, not to the resource configuration",
+ err,
+ ),
+ )
+ return
+ }
+ d.client = apiClient
+ tflog.Info(ctx, fmt.Sprintf("%s client configured", errorPrefix))
+}
+
+func (d *databaseDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
+ var data sqlserverflexbetaGen.DatabaseModel
+ readErr := "Read DB error"
+
+ // Read Terraform configuration data into the model
+ resp.Diagnostics.Append(req.Config.Get(ctx, &data)...)
+
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ ctx = core.InitProviderContext(ctx)
+
+ projectId := data.ProjectId.ValueString()
+ region := d.providerData.GetRegionWithOverride(data.Region)
+ instanceId := data.InstanceId.ValueString()
+
+ ctx = tflog.SetField(ctx, "project_id", projectId)
+ ctx = tflog.SetField(ctx, "region", region)
+ ctx = tflog.SetField(ctx, "instance_id", instanceId)
+
+ databaseName := data.DatabaseName.ValueString()
+
+ databaseResp, err := d.client.GetDatabaseRequest(ctx, projectId, region, instanceId, databaseName).Execute()
+ if err != nil {
+ utils.LogError(
+ ctx,
+ &resp.Diagnostics,
+ err,
+ "Reading database",
+ fmt.Sprintf("database with %q does not exist in project %q.", databaseName, projectId),
+ map[int]string{
+ http.StatusForbidden: fmt.Sprintf("Project with %q not found or forbidden access", projectId),
+ },
+ )
+ resp.State.RemoveResource(ctx)
+ return
+ }
+
+ ctx = core.LogResponse(ctx)
+
+ dbId, ok := databaseResp.GetIdOk()
+ if !ok {
+ core.LogAndAddError(
+ ctx,
+ &resp.Diagnostics,
+ readErr,
+ "Database creation waiting: returned id is nil",
+ )
+ return
+ }
+ data.Id = types.Int64Value(dbId)
+
+ owner, ok := databaseResp.GetOwnerOk()
+ if !ok {
+ core.LogAndAddError(
+ ctx,
+ &resp.Diagnostics,
+ readErr,
+ "Database creation waiting: returned owner is nil",
+ )
+ return
+ }
+ data.Owner = types.StringValue(owner)
+
+ // Save data into Terraform state
+ resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
+}
diff --git a/stackit/internal/services/sqlserverflexbeta/database/datasources_gen/database_data_source_gen.go b/stackit/internal/services/sqlserverflexbeta/database/datasources_gen/database_data_source_gen.go
new file mode 100644
index 00000000..cfdc1a86
--- /dev/null
+++ b/stackit/internal/services/sqlserverflexbeta/database/datasources_gen/database_data_source_gen.go
@@ -0,0 +1,82 @@
+// Code generated by terraform-plugin-framework-generator DO NOT EDIT.
+
+package sqlserverflexbeta
+
+import (
+ "context"
+ "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+
+ "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
+)
+
+func DatabaseDataSourceSchema(ctx context.Context) schema.Schema {
+ return schema.Schema{
+ Attributes: map[string]schema.Attribute{
+ "collation_name": schema.StringAttribute{
+ Computed: true,
+ Description: "The collation of the database. This database collation should match the *collation_name* of one of the collations given by the **Get database collation list** endpoint.",
+ MarkdownDescription: "The collation of the database. This database collation should match the *collation_name* of one of the collations given by the **Get database collation list** endpoint.",
+ },
+ "compatibility_level": schema.Int64Attribute{
+ Computed: true,
+ Description: "CompatibilityLevel of the Database.",
+ MarkdownDescription: "CompatibilityLevel of the Database.",
+ },
+ "database_name": schema.StringAttribute{
+ Required: true,
+ Description: "The name of the database.",
+ MarkdownDescription: "The name of the database.",
+ },
+ "id": schema.Int64Attribute{
+ Computed: true,
+ Description: "The id of the database.",
+ MarkdownDescription: "The id of the database.",
+ },
+ "instance_id": schema.StringAttribute{
+ Required: true,
+ Description: "The ID of the instance.",
+ MarkdownDescription: "The ID of the instance.",
+ },
+ "name": schema.StringAttribute{
+ Computed: true,
+ Description: "The name of the database.",
+ MarkdownDescription: "The name of the database.",
+ },
+ "owner": schema.StringAttribute{
+ Computed: true,
+ Description: "The owner of the database.",
+ MarkdownDescription: "The owner of the database.",
+ },
+ "project_id": schema.StringAttribute{
+ Required: true,
+ Description: "The STACKIT project ID.",
+ MarkdownDescription: "The STACKIT project ID.",
+ },
+ "region": schema.StringAttribute{
+ Optional: true,
+ Computed: true,
+ Description: "The region which should be addressed",
+ MarkdownDescription: "The region which should be addressed",
+ Validators: []validator.String{
+ stringvalidator.OneOf(
+ "eu01",
+ ),
+ },
+ },
+ },
+ }
+}
+
+type DatabaseModel struct {
+ CollationName types.String `tfsdk:"collation_name"`
+ CompatibilityLevel types.Int64 `tfsdk:"compatibility_level"`
+ DatabaseName types.String `tfsdk:"database_name"`
+ Id types.Int64 `tfsdk:"id"`
+ InstanceId types.String `tfsdk:"instance_id"`
+ Name types.String `tfsdk:"name"`
+ Owner types.String `tfsdk:"owner"`
+ ProjectId types.String `tfsdk:"project_id"`
+ Region types.String `tfsdk:"region"`
+}
diff --git a/stackit/internal/services/sqlserverflexbeta/database/datasources_gen/databases_data_source_gen.go b/stackit/internal/services/sqlserverflexbeta/database/datasources_gen/databases_data_source_gen.go
new file mode 100644
index 00000000..71ec8fb4
--- /dev/null
+++ b/stackit/internal/services/sqlserverflexbeta/database/datasources_gen/databases_data_source_gen.go
@@ -0,0 +1,1180 @@
+// Code generated by terraform-plugin-framework-generator DO NOT EDIT.
+
+package sqlserverflexbeta
+
+import (
+ "context"
+ "fmt"
+ "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ "github.com/hashicorp/terraform-plugin-framework/attr"
+ "github.com/hashicorp/terraform-plugin-framework/diag"
+ "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+ "github.com/hashicorp/terraform-plugin-framework/types/basetypes"
+ "github.com/hashicorp/terraform-plugin-go/tftypes"
+ "strings"
+
+ "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
+)
+
+func DatabasesDataSourceSchema(ctx context.Context) schema.Schema {
+ return schema.Schema{
+ Attributes: map[string]schema.Attribute{
+ "databases": schema.ListNestedAttribute{
+ NestedObject: schema.NestedAttributeObject{
+ Attributes: map[string]schema.Attribute{
+ "created": schema.StringAttribute{
+ Computed: true,
+ Description: "The date when the database was created in RFC3339 format.",
+ MarkdownDescription: "The date when the database was created in RFC3339 format.",
+ },
+ "id": schema.Int64Attribute{
+ Computed: true,
+ Description: "The id of the database.",
+ MarkdownDescription: "The id of the database.",
+ },
+ "name": schema.StringAttribute{
+ Computed: true,
+ Description: "The name of the database.",
+ MarkdownDescription: "The name of the database.",
+ },
+ "owner": schema.StringAttribute{
+ Computed: true,
+ Description: "The owner of the database.",
+ MarkdownDescription: "The owner of the database.",
+ },
+ },
+ CustomType: DatabasesType{
+ ObjectType: types.ObjectType{
+ AttrTypes: DatabasesValue{}.AttributeTypes(ctx),
+ },
+ },
+ },
+ Computed: true,
+ Description: "A list containing all databases for the instance.",
+ MarkdownDescription: "A list containing all databases for the instance.",
+ },
+ "instance_id": schema.StringAttribute{
+ Required: true,
+ Description: "The ID of the instance.",
+ MarkdownDescription: "The ID of the instance.",
+ },
+ "page": schema.Int64Attribute{
+ Optional: true,
+ Computed: true,
+ Description: "Number of the page of items list to be returned.",
+ MarkdownDescription: "Number of the page of items list to be returned.",
+ },
+ "pagination": schema.SingleNestedAttribute{
+ Attributes: map[string]schema.Attribute{
+ "page": schema.Int64Attribute{
+ Computed: true,
+ },
+ "size": schema.Int64Attribute{
+ Computed: true,
+ },
+ "sort": schema.StringAttribute{
+ Computed: true,
+ },
+ "total_pages": schema.Int64Attribute{
+ Computed: true,
+ },
+ "total_rows": schema.Int64Attribute{
+ Computed: true,
+ },
+ },
+ CustomType: PaginationType{
+ ObjectType: types.ObjectType{
+ AttrTypes: PaginationValue{}.AttributeTypes(ctx),
+ },
+ },
+ Computed: true,
+ },
+ "project_id": schema.StringAttribute{
+ Required: true,
+ Description: "The STACKIT project ID.",
+ MarkdownDescription: "The STACKIT project ID.",
+ },
+ "region": schema.StringAttribute{
+ Required: true,
+ Description: "The region which should be addressed",
+ MarkdownDescription: "The region which should be addressed",
+ Validators: []validator.String{
+ stringvalidator.OneOf(
+ "eu01",
+ ),
+ },
+ },
+ "size": schema.Int64Attribute{
+ Optional: true,
+ Computed: true,
+ Description: "Number of items to be returned on each page.",
+ MarkdownDescription: "Number of items to be returned on each page.",
+ },
+ "sort": schema.StringAttribute{
+ Optional: true,
+ Computed: true,
+ Description: "Sorting of the databases to be returned on each page.",
+ MarkdownDescription: "Sorting of the databases to be returned on each page.",
+ Validators: []validator.String{
+ stringvalidator.OneOf(
+ "created_at.desc",
+ "created_at.asc",
+ "database_id.desc",
+ "database_id.asc",
+ "database_name.desc",
+ "database_name.asc",
+ "database_owner.desc",
+ "database_owner.asc",
+ "index.asc",
+ "index.desc",
+ ),
+ },
+ },
+ },
+ }
+}
+
+type DatabasesModel struct {
+ Databases types.List `tfsdk:"databases"`
+ InstanceId types.String `tfsdk:"instance_id"`
+ Page types.Int64 `tfsdk:"page"`
+ Pagination PaginationValue `tfsdk:"pagination"`
+ ProjectId types.String `tfsdk:"project_id"`
+ Region types.String `tfsdk:"region"`
+ Size types.Int64 `tfsdk:"size"`
+ Sort types.String `tfsdk:"sort"`
+}
+
+var _ basetypes.ObjectTypable = DatabasesType{}
+
+type DatabasesType struct {
+ basetypes.ObjectType
+}
+
+func (t DatabasesType) Equal(o attr.Type) bool {
+ other, ok := o.(DatabasesType)
+
+ if !ok {
+ return false
+ }
+
+ return t.ObjectType.Equal(other.ObjectType)
+}
+
+func (t DatabasesType) String() string {
+ return "DatabasesType"
+}
+
+func (t DatabasesType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ attributes := in.Attributes()
+
+ createdAttribute, ok := attributes["created"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `created is missing from object`)
+
+ return nil, diags
+ }
+
+ createdVal, ok := createdAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`created expected to be basetypes.StringValue, was: %T`, createdAttribute))
+ }
+
+ idAttribute, ok := attributes["id"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `id is missing from object`)
+
+ return nil, diags
+ }
+
+ idVal, ok := idAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`id expected to be basetypes.Int64Value, was: %T`, idAttribute))
+ }
+
+ nameAttribute, ok := attributes["name"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `name is missing from object`)
+
+ return nil, diags
+ }
+
+ nameVal, ok := nameAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`name expected to be basetypes.StringValue, was: %T`, nameAttribute))
+ }
+
+ ownerAttribute, ok := attributes["owner"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `owner is missing from object`)
+
+ return nil, diags
+ }
+
+ ownerVal, ok := ownerAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`owner expected to be basetypes.StringValue, was: %T`, ownerAttribute))
+ }
+
+ if diags.HasError() {
+ return nil, diags
+ }
+
+ return DatabasesValue{
+ Created: createdVal,
+ Id: idVal,
+ Name: nameVal,
+ Owner: ownerVal,
+ state: attr.ValueStateKnown,
+ }, diags
+}
+
+func NewDatabasesValueNull() DatabasesValue {
+ return DatabasesValue{
+ state: attr.ValueStateNull,
+ }
+}
+
+func NewDatabasesValueUnknown() DatabasesValue {
+ return DatabasesValue{
+ state: attr.ValueStateUnknown,
+ }
+}
+
+func NewDatabasesValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (DatabasesValue, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521
+ ctx := context.Background()
+
+ for name, attributeType := range attributeTypes {
+ attribute, ok := attributes[name]
+
+ if !ok {
+ diags.AddError(
+ "Missing DatabasesValue Attribute Value",
+ "While creating a DatabasesValue value, a missing attribute value was detected. "+
+ "A DatabasesValue must contain values for all attributes, even if null or unknown. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("DatabasesValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()),
+ )
+
+ continue
+ }
+
+ if !attributeType.Equal(attribute.Type(ctx)) {
+ diags.AddError(
+ "Invalid DatabasesValue Attribute Type",
+ "While creating a DatabasesValue value, an invalid attribute value was detected. "+
+ "A DatabasesValue must use a matching attribute type for the value. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("DatabasesValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+
+ fmt.Sprintf("DatabasesValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)),
+ )
+ }
+ }
+
+ for name := range attributes {
+ _, ok := attributeTypes[name]
+
+ if !ok {
+ diags.AddError(
+ "Extra DatabasesValue Attribute Value",
+ "While creating a DatabasesValue value, an extra attribute value was detected. "+
+ "A DatabasesValue must not contain values beyond the expected attribute types. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("Extra DatabasesValue Attribute Name: %s", name),
+ )
+ }
+ }
+
+ if diags.HasError() {
+ return NewDatabasesValueUnknown(), diags
+ }
+
+ createdAttribute, ok := attributes["created"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `created is missing from object`)
+
+ return NewDatabasesValueUnknown(), diags
+ }
+
+ createdVal, ok := createdAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`created expected to be basetypes.StringValue, was: %T`, createdAttribute))
+ }
+
+ idAttribute, ok := attributes["id"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `id is missing from object`)
+
+ return NewDatabasesValueUnknown(), diags
+ }
+
+ idVal, ok := idAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`id expected to be basetypes.Int64Value, was: %T`, idAttribute))
+ }
+
+ nameAttribute, ok := attributes["name"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `name is missing from object`)
+
+ return NewDatabasesValueUnknown(), diags
+ }
+
+ nameVal, ok := nameAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`name expected to be basetypes.StringValue, was: %T`, nameAttribute))
+ }
+
+ ownerAttribute, ok := attributes["owner"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `owner is missing from object`)
+
+ return NewDatabasesValueUnknown(), diags
+ }
+
+ ownerVal, ok := ownerAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`owner expected to be basetypes.StringValue, was: %T`, ownerAttribute))
+ }
+
+ if diags.HasError() {
+ return NewDatabasesValueUnknown(), diags
+ }
+
+ return DatabasesValue{
+ Created: createdVal,
+ Id: idVal,
+ Name: nameVal,
+ Owner: ownerVal,
+ state: attr.ValueStateKnown,
+ }, diags
+}
+
+func NewDatabasesValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) DatabasesValue {
+ object, diags := NewDatabasesValue(attributeTypes, attributes)
+
+ if diags.HasError() {
+ // This could potentially be added to the diag package.
+ diagsStrings := make([]string, 0, len(diags))
+
+ for _, diagnostic := range diags {
+ diagsStrings = append(diagsStrings, fmt.Sprintf(
+ "%s | %s | %s",
+ diagnostic.Severity(),
+ diagnostic.Summary(),
+ diagnostic.Detail()))
+ }
+
+ panic("NewDatabasesValueMust received error(s): " + strings.Join(diagsStrings, "\n"))
+ }
+
+ return object
+}
+
+func (t DatabasesType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) {
+ if in.Type() == nil {
+ return NewDatabasesValueNull(), nil
+ }
+
+ if !in.Type().Equal(t.TerraformType(ctx)) {
+ return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type())
+ }
+
+ if !in.IsKnown() {
+ return NewDatabasesValueUnknown(), nil
+ }
+
+ if in.IsNull() {
+ return NewDatabasesValueNull(), nil
+ }
+
+ attributes := map[string]attr.Value{}
+
+ val := map[string]tftypes.Value{}
+
+ err := in.As(&val)
+
+ if err != nil {
+ return nil, err
+ }
+
+ for k, v := range val {
+ a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v)
+
+ if err != nil {
+ return nil, err
+ }
+
+ attributes[k] = a
+ }
+
+ return NewDatabasesValueMust(DatabasesValue{}.AttributeTypes(ctx), attributes), nil
+}
+
+func (t DatabasesType) ValueType(ctx context.Context) attr.Value {
+ return DatabasesValue{}
+}
+
+var _ basetypes.ObjectValuable = DatabasesValue{}
+
+type DatabasesValue struct {
+ Created basetypes.StringValue `tfsdk:"created"`
+ Id basetypes.Int64Value `tfsdk:"id"`
+ Name basetypes.StringValue `tfsdk:"name"`
+ Owner basetypes.StringValue `tfsdk:"owner"`
+ state attr.ValueState
+}
+
+func (v DatabasesValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
+ attrTypes := make(map[string]tftypes.Type, 4)
+
+ var val tftypes.Value
+ var err error
+
+ attrTypes["created"] = basetypes.StringType{}.TerraformType(ctx)
+ attrTypes["id"] = basetypes.Int64Type{}.TerraformType(ctx)
+ attrTypes["name"] = basetypes.StringType{}.TerraformType(ctx)
+ attrTypes["owner"] = basetypes.StringType{}.TerraformType(ctx)
+
+ objectType := tftypes.Object{AttributeTypes: attrTypes}
+
+ switch v.state {
+ case attr.ValueStateKnown:
+ vals := make(map[string]tftypes.Value, 4)
+
+ val, err = v.Created.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["created"] = val
+
+ val, err = v.Id.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["id"] = val
+
+ val, err = v.Name.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["name"] = val
+
+ val, err = v.Owner.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["owner"] = val
+
+ if err := tftypes.ValidateValue(objectType, vals); err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ return tftypes.NewValue(objectType, vals), nil
+ case attr.ValueStateNull:
+ return tftypes.NewValue(objectType, nil), nil
+ case attr.ValueStateUnknown:
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), nil
+ default:
+ panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state))
+ }
+}
+
+func (v DatabasesValue) IsNull() bool {
+ return v.state == attr.ValueStateNull
+}
+
+func (v DatabasesValue) IsUnknown() bool {
+ return v.state == attr.ValueStateUnknown
+}
+
+func (v DatabasesValue) String() string {
+ return "DatabasesValue"
+}
+
+func (v DatabasesValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ attributeTypes := map[string]attr.Type{
+ "created": basetypes.StringType{},
+ "id": basetypes.Int64Type{},
+ "name": basetypes.StringType{},
+ "owner": basetypes.StringType{},
+ }
+
+ if v.IsNull() {
+ return types.ObjectNull(attributeTypes), diags
+ }
+
+ if v.IsUnknown() {
+ return types.ObjectUnknown(attributeTypes), diags
+ }
+
+ objVal, diags := types.ObjectValue(
+ attributeTypes,
+ map[string]attr.Value{
+ "created": v.Created,
+ "id": v.Id,
+ "name": v.Name,
+ "owner": v.Owner,
+ })
+
+ return objVal, diags
+}
+
+func (v DatabasesValue) Equal(o attr.Value) bool {
+ other, ok := o.(DatabasesValue)
+
+ if !ok {
+ return false
+ }
+
+ if v.state != other.state {
+ return false
+ }
+
+ if v.state != attr.ValueStateKnown {
+ return true
+ }
+
+ if !v.Created.Equal(other.Created) {
+ return false
+ }
+
+ if !v.Id.Equal(other.Id) {
+ return false
+ }
+
+ if !v.Name.Equal(other.Name) {
+ return false
+ }
+
+ if !v.Owner.Equal(other.Owner) {
+ return false
+ }
+
+ return true
+}
+
+func (v DatabasesValue) Type(ctx context.Context) attr.Type {
+ return DatabasesType{
+ basetypes.ObjectType{
+ AttrTypes: v.AttributeTypes(ctx),
+ },
+ }
+}
+
+func (v DatabasesValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
+ return map[string]attr.Type{
+ "created": basetypes.StringType{},
+ "id": basetypes.Int64Type{},
+ "name": basetypes.StringType{},
+ "owner": basetypes.StringType{},
+ }
+}
+
+var _ basetypes.ObjectTypable = PaginationType{}
+
+type PaginationType struct {
+ basetypes.ObjectType
+}
+
+func (t PaginationType) Equal(o attr.Type) bool {
+ other, ok := o.(PaginationType)
+
+ if !ok {
+ return false
+ }
+
+ return t.ObjectType.Equal(other.ObjectType)
+}
+
+func (t PaginationType) String() string {
+ return "PaginationType"
+}
+
+func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ attributes := in.Attributes()
+
+ pageAttribute, ok := attributes["page"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `page is missing from object`)
+
+ return nil, diags
+ }
+
+ pageVal, ok := pageAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute))
+ }
+
+ sizeAttribute, ok := attributes["size"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `size is missing from object`)
+
+ return nil, diags
+ }
+
+ sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
+ }
+
+ sortAttribute, ok := attributes["sort"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `sort is missing from object`)
+
+ return nil, diags
+ }
+
+ sortVal, ok := sortAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`sort expected to be basetypes.StringValue, was: %T`, sortAttribute))
+ }
+
+ totalPagesAttribute, ok := attributes["total_pages"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `total_pages is missing from object`)
+
+ return nil, diags
+ }
+
+ totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute))
+ }
+
+ totalRowsAttribute, ok := attributes["total_rows"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `total_rows is missing from object`)
+
+ return nil, diags
+ }
+
+ totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute))
+ }
+
+ if diags.HasError() {
+ return nil, diags
+ }
+
+ return PaginationValue{
+ Page: pageVal,
+ Size: sizeVal,
+ Sort: sortVal,
+ TotalPages: totalPagesVal,
+ TotalRows: totalRowsVal,
+ state: attr.ValueStateKnown,
+ }, diags
+}
+
+func NewPaginationValueNull() PaginationValue {
+ return PaginationValue{
+ state: attr.ValueStateNull,
+ }
+}
+
+func NewPaginationValueUnknown() PaginationValue {
+ return PaginationValue{
+ state: attr.ValueStateUnknown,
+ }
+}
+
+func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (PaginationValue, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521
+ ctx := context.Background()
+
+ for name, attributeType := range attributeTypes {
+ attribute, ok := attributes[name]
+
+ if !ok {
+ diags.AddError(
+ "Missing PaginationValue Attribute Value",
+ "While creating a PaginationValue value, a missing attribute value was detected. "+
+ "A PaginationValue must contain values for all attributes, even if null or unknown. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("PaginationValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()),
+ )
+
+ continue
+ }
+
+ if !attributeType.Equal(attribute.Type(ctx)) {
+ diags.AddError(
+ "Invalid PaginationValue Attribute Type",
+ "While creating a PaginationValue value, an invalid attribute value was detected. "+
+ "A PaginationValue must use a matching attribute type for the value. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("PaginationValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+
+ fmt.Sprintf("PaginationValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)),
+ )
+ }
+ }
+
+ for name := range attributes {
+ _, ok := attributeTypes[name]
+
+ if !ok {
+ diags.AddError(
+ "Extra PaginationValue Attribute Value",
+ "While creating a PaginationValue value, an extra attribute value was detected. "+
+ "A PaginationValue must not contain values beyond the expected attribute types. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("Extra PaginationValue Attribute Name: %s", name),
+ )
+ }
+ }
+
+ if diags.HasError() {
+ return NewPaginationValueUnknown(), diags
+ }
+
+ pageAttribute, ok := attributes["page"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `page is missing from object`)
+
+ return NewPaginationValueUnknown(), diags
+ }
+
+ pageVal, ok := pageAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute))
+ }
+
+ sizeAttribute, ok := attributes["size"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `size is missing from object`)
+
+ return NewPaginationValueUnknown(), diags
+ }
+
+ sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
+ }
+
+ sortAttribute, ok := attributes["sort"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `sort is missing from object`)
+
+ return NewPaginationValueUnknown(), diags
+ }
+
+ sortVal, ok := sortAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`sort expected to be basetypes.StringValue, was: %T`, sortAttribute))
+ }
+
+ totalPagesAttribute, ok := attributes["total_pages"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `total_pages is missing from object`)
+
+ return NewPaginationValueUnknown(), diags
+ }
+
+ totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute))
+ }
+
+ totalRowsAttribute, ok := attributes["total_rows"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `total_rows is missing from object`)
+
+ return NewPaginationValueUnknown(), diags
+ }
+
+ totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute))
+ }
+
+ if diags.HasError() {
+ return NewPaginationValueUnknown(), diags
+ }
+
+ return PaginationValue{
+ Page: pageVal,
+ Size: sizeVal,
+ Sort: sortVal,
+ TotalPages: totalPagesVal,
+ TotalRows: totalRowsVal,
+ state: attr.ValueStateKnown,
+ }, diags
+}
+
+func NewPaginationValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) PaginationValue {
+ object, diags := NewPaginationValue(attributeTypes, attributes)
+
+ if diags.HasError() {
+ // This could potentially be added to the diag package.
+ diagsStrings := make([]string, 0, len(diags))
+
+ for _, diagnostic := range diags {
+ diagsStrings = append(diagsStrings, fmt.Sprintf(
+ "%s | %s | %s",
+ diagnostic.Severity(),
+ diagnostic.Summary(),
+ diagnostic.Detail()))
+ }
+
+ panic("NewPaginationValueMust received error(s): " + strings.Join(diagsStrings, "\n"))
+ }
+
+ return object
+}
+
+func (t PaginationType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) {
+ if in.Type() == nil {
+ return NewPaginationValueNull(), nil
+ }
+
+ if !in.Type().Equal(t.TerraformType(ctx)) {
+ return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type())
+ }
+
+ if !in.IsKnown() {
+ return NewPaginationValueUnknown(), nil
+ }
+
+ if in.IsNull() {
+ return NewPaginationValueNull(), nil
+ }
+
+ attributes := map[string]attr.Value{}
+
+ val := map[string]tftypes.Value{}
+
+ err := in.As(&val)
+
+ if err != nil {
+ return nil, err
+ }
+
+ for k, v := range val {
+ a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v)
+
+ if err != nil {
+ return nil, err
+ }
+
+ attributes[k] = a
+ }
+
+ return NewPaginationValueMust(PaginationValue{}.AttributeTypes(ctx), attributes), nil
+}
+
+func (t PaginationType) ValueType(ctx context.Context) attr.Value {
+ return PaginationValue{}
+}
+
+var _ basetypes.ObjectValuable = PaginationValue{}
+
+type PaginationValue struct {
+ Page basetypes.Int64Value `tfsdk:"page"`
+ Size basetypes.Int64Value `tfsdk:"size"`
+ Sort basetypes.StringValue `tfsdk:"sort"`
+ TotalPages basetypes.Int64Value `tfsdk:"total_pages"`
+ TotalRows basetypes.Int64Value `tfsdk:"total_rows"`
+ state attr.ValueState
+}
+
+func (v PaginationValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
+ attrTypes := make(map[string]tftypes.Type, 5)
+
+ var val tftypes.Value
+ var err error
+
+ attrTypes["page"] = basetypes.Int64Type{}.TerraformType(ctx)
+ attrTypes["size"] = basetypes.Int64Type{}.TerraformType(ctx)
+ attrTypes["sort"] = basetypes.StringType{}.TerraformType(ctx)
+ attrTypes["total_pages"] = basetypes.Int64Type{}.TerraformType(ctx)
+ attrTypes["total_rows"] = basetypes.Int64Type{}.TerraformType(ctx)
+
+ objectType := tftypes.Object{AttributeTypes: attrTypes}
+
+ switch v.state {
+ case attr.ValueStateKnown:
+ vals := make(map[string]tftypes.Value, 5)
+
+ val, err = v.Page.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["page"] = val
+
+ val, err = v.Size.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["size"] = val
+
+ val, err = v.Sort.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["sort"] = val
+
+ val, err = v.TotalPages.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["total_pages"] = val
+
+ val, err = v.TotalRows.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["total_rows"] = val
+
+ if err := tftypes.ValidateValue(objectType, vals); err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ return tftypes.NewValue(objectType, vals), nil
+ case attr.ValueStateNull:
+ return tftypes.NewValue(objectType, nil), nil
+ case attr.ValueStateUnknown:
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), nil
+ default:
+ panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state))
+ }
+}
+
+func (v PaginationValue) IsNull() bool {
+ return v.state == attr.ValueStateNull
+}
+
+func (v PaginationValue) IsUnknown() bool {
+ return v.state == attr.ValueStateUnknown
+}
+
+func (v PaginationValue) String() string {
+ return "PaginationValue"
+}
+
+func (v PaginationValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ attributeTypes := map[string]attr.Type{
+ "page": basetypes.Int64Type{},
+ "size": basetypes.Int64Type{},
+ "sort": basetypes.StringType{},
+ "total_pages": basetypes.Int64Type{},
+ "total_rows": basetypes.Int64Type{},
+ }
+
+ if v.IsNull() {
+ return types.ObjectNull(attributeTypes), diags
+ }
+
+ if v.IsUnknown() {
+ return types.ObjectUnknown(attributeTypes), diags
+ }
+
+ objVal, diags := types.ObjectValue(
+ attributeTypes,
+ map[string]attr.Value{
+ "page": v.Page,
+ "size": v.Size,
+ "sort": v.Sort,
+ "total_pages": v.TotalPages,
+ "total_rows": v.TotalRows,
+ })
+
+ return objVal, diags
+}
+
+func (v PaginationValue) Equal(o attr.Value) bool {
+ other, ok := o.(PaginationValue)
+
+ if !ok {
+ return false
+ }
+
+ if v.state != other.state {
+ return false
+ }
+
+ if v.state != attr.ValueStateKnown {
+ return true
+ }
+
+ if !v.Page.Equal(other.Page) {
+ return false
+ }
+
+ if !v.Size.Equal(other.Size) {
+ return false
+ }
+
+ if !v.Sort.Equal(other.Sort) {
+ return false
+ }
+
+ if !v.TotalPages.Equal(other.TotalPages) {
+ return false
+ }
+
+ if !v.TotalRows.Equal(other.TotalRows) {
+ return false
+ }
+
+ return true
+}
+
+func (v PaginationValue) Type(ctx context.Context) attr.Type {
+ return PaginationType{
+ basetypes.ObjectType{
+ AttrTypes: v.AttributeTypes(ctx),
+ },
+ }
+}
+
+func (v PaginationValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
+ return map[string]attr.Type{
+ "page": basetypes.Int64Type{},
+ "size": basetypes.Int64Type{},
+ "sort": basetypes.StringType{},
+ "total_pages": basetypes.Int64Type{},
+ "total_rows": basetypes.Int64Type{},
+ }
+}
diff --git a/stackit/internal/services/sqlserverflexbeta/database/resource.go b/stackit/internal/services/sqlserverflexbeta/database/resource.go
new file mode 100644
index 00000000..b28f5ea0
--- /dev/null
+++ b/stackit/internal/services/sqlserverflexbeta/database/resource.go
@@ -0,0 +1,426 @@
+package sqlserverflexbeta
+
+import (
+ "context"
+ "fmt"
+ "strings"
+ "time"
+
+ "github.com/hashicorp/terraform-plugin-framework/path"
+ "github.com/hashicorp/terraform-plugin-framework/resource"
+ "github.com/hashicorp/terraform-plugin-framework/resource/identityschema"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+ "github.com/hashicorp/terraform-plugin-log/tflog"
+ "github.com/stackitcloud/stackit-sdk-go/core/config"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
+ wait "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/wait/sqlserverflexbeta"
+
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta"
+
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
+
+ sqlserverflexbetaResGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/database/resources_gen"
+)
+
+var (
+ _ resource.Resource = &databaseResource{}
+ _ resource.ResourceWithConfigure = &databaseResource{}
+ _ resource.ResourceWithImportState = &databaseResource{}
+ _ resource.ResourceWithModifyPlan = &databaseResource{}
+ _ resource.ResourceWithIdentity = &databaseResource{}
+)
+
+func NewDatabaseResource() resource.Resource {
+ return &databaseResource{}
+}
+
+type databaseResource struct {
+ client *sqlserverflexbeta.APIClient
+ providerData core.ProviderData
+}
+
+type DatabaseResourceIdentityModel struct {
+ ProjectID types.String `tfsdk:"project_id"`
+ Region types.String `tfsdk:"region"`
+ InstanceID types.String `tfsdk:"instance_id"`
+ DatabaseName types.String `tfsdk:"database_name"`
+}
+
+func (r *databaseResource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
+ resp.TypeName = req.ProviderTypeName + "_sqlserverflexbeta_database"
+}
+
+func (r *databaseResource) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) {
+ resp.Schema = sqlserverflexbetaResGen.DatabaseResourceSchema(ctx)
+}
+
+func (r *databaseResource) IdentitySchema(_ context.Context, _ resource.IdentitySchemaRequest, resp *resource.IdentitySchemaResponse) {
+ resp.IdentitySchema = identityschema.Schema{
+ Attributes: map[string]identityschema.Attribute{
+ "project_id": identityschema.StringAttribute{
+ RequiredForImport: true, // must be set during import by the practitioner
+ },
+ "region": identityschema.StringAttribute{
+ RequiredForImport: true, // can be defaulted by the provider configuration
+ },
+ "instance_id": identityschema.StringAttribute{
+ RequiredForImport: true, // can be defaulted by the provider configuration
+ },
+ "database_name": identityschema.StringAttribute{
+ RequiredForImport: true, // can be defaulted by the provider configuration
+ },
+ },
+ }
+}
+
+// Configure adds the provider configured client to the resource.
+func (r *databaseResource) Configure(
+ ctx context.Context,
+ req resource.ConfigureRequest,
+ resp *resource.ConfigureResponse,
+) {
+ var ok bool
+ r.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics)
+ if !ok {
+ return
+ }
+
+ apiClientConfigOptions := []config.ConfigurationOption{
+ config.WithCustomAuth(r.providerData.RoundTripper),
+ utils.UserAgentConfigOption(r.providerData.Version),
+ }
+ if r.providerData.SQLServerFlexCustomEndpoint != "" {
+ apiClientConfigOptions = append(apiClientConfigOptions, config.WithEndpoint(r.providerData.SQLServerFlexCustomEndpoint))
+ } else {
+ apiClientConfigOptions = append(apiClientConfigOptions, config.WithRegion(r.providerData.GetRegion()))
+ }
+ apiClient, err := sqlserverflexbeta.NewAPIClient(apiClientConfigOptions...)
+ if err != nil {
+ resp.Diagnostics.AddError(
+ "Error configuring API client",
+ fmt.Sprintf(
+ "Configuring client: %v. This is an error related to the provider configuration, not to the resource configuration",
+ err,
+ ),
+ )
+ return
+ }
+ r.client = apiClient
+ tflog.Info(ctx, "sqlserverflexbeta.Database client configured")
+}
+
+func (r *databaseResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) {
+ var data sqlserverflexbetaResGen.DatabaseModel
+ createErr := "DB create error"
+
+ // Read Terraform plan data into the model
+ resp.Diagnostics.Append(req.Plan.Get(ctx, &data)...)
+
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ // Read identity data
+ var identityData DatabaseResourceIdentityModel
+ resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ ctx = core.InitProviderContext(ctx)
+
+ projectId := identityData.ProjectID.ValueString()
+ region := identityData.Region.ValueString()
+ instanceId := identityData.InstanceID.ValueString()
+ ctx = tflog.SetField(ctx, "project_id", projectId)
+ ctx = tflog.SetField(ctx, "region", region)
+ ctx = tflog.SetField(ctx, "instance_id", instanceId)
+
+ databaseName := identityData.DatabaseName.ValueString()
+ ctx = tflog.SetField(ctx, "database_name", databaseName)
+
+ payLoad := sqlserverflexbeta.CreateDatabaseRequestPayload{
+ Collation: data.Collation.ValueStringPointer(),
+ Compatibility: data.Compatibility.ValueInt64Pointer(),
+ Name: data.Name.ValueStringPointer(),
+ Owner: data.Owner.ValueStringPointer(),
+ }
+
+ createResp, err := r.client.CreateDatabaseRequest(ctx, projectId, region, instanceId).
+ CreateDatabaseRequestPayload(payLoad).
+ Execute()
+ if err != nil {
+ core.LogAndAddError(
+ ctx,
+ &resp.Diagnostics,
+ createErr,
+ fmt.Sprintf("Calling API: %v", err),
+ )
+ return
+ }
+
+ ctx = core.LogResponse(ctx)
+ createId, ok := createResp.GetIdOk()
+ if !ok {
+ core.LogAndAddError(
+ ctx,
+ &resp.Diagnostics,
+ createErr,
+ fmt.Sprintf("Calling API: %v", err),
+ )
+ }
+
+ waitResp, err := wait.CreateDatabaseWaitHandler(
+ ctx,
+ r.client,
+ projectId,
+ instanceId,
+ region,
+ databaseName,
+ ).SetSleepBeforeWait(
+ 30 * time.Second,
+ ).SetTimeout(
+ 15 * time.Minute,
+ ).WaitWithContext(ctx)
+ if err != nil {
+ core.LogAndAddError(
+ ctx,
+ &resp.Diagnostics,
+ createErr,
+ fmt.Sprintf("Database creation waiting: %v", err),
+ )
+ return
+ }
+
+ if waitResp.Id == nil {
+ core.LogAndAddError(
+ ctx,
+ &resp.Diagnostics,
+ createErr,
+ "Database creation waiting: returned id is nil",
+ )
+ return
+ }
+
+ if *waitResp.Id != createId {
+ core.LogAndAddError(
+ ctx,
+ &resp.Diagnostics,
+ createErr,
+ "Database creation waiting: returned id is different",
+ )
+ return
+ }
+
+ if *waitResp.Owner != data.Owner.ValueString() {
+ core.LogAndAddError(
+ ctx,
+ &resp.Diagnostics,
+ createErr,
+ "Database creation waiting: returned owner is different",
+ )
+ return
+ }
+
+ if *waitResp.Name != data.Name.ValueString() {
+ core.LogAndAddError(
+ ctx,
+ &resp.Diagnostics,
+ createErr,
+ "Database creation waiting: returned name is different",
+ )
+ return
+ }
+
+ data.Id = types.Int64PointerValue(waitResp.Id)
+ data.Name = types.StringPointerValue(waitResp.Name)
+
+ // Save data into Terraform state
+ resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
+
+ tflog.Info(ctx, "sqlserverflexbeta.Database created")
+}
+
+func (r *databaseResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) {
+ var data sqlserverflexbetaResGen.DatabaseModel
+
+ // Read Terraform prior state data into the model
+ resp.Diagnostics.Append(req.State.Get(ctx, &data)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ // Read identity data
+ var identityData DatabaseResourceIdentityModel
+ resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ ctx = core.InitProviderContext(ctx)
+
+ projectId := identityData.ProjectID.ValueString()
+ region := identityData.Region.ValueString()
+ ctx = tflog.SetField(ctx, "project_id", projectId)
+ ctx = tflog.SetField(ctx, "region", region)
+
+ // Todo: Read API call logic
+
+ // Save updated data into Terraform state
+ resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
+
+ // TODO: Set data returned by API in identity
+ identity := DatabaseResourceIdentityModel{
+ ProjectID: types.StringValue(projectId),
+ Region: types.StringValue(region),
+ // InstanceID: types.StringValue(instanceId),
+ }
+ resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ tflog.Info(ctx, "sqlserverflexbeta.Database read")
+}
+
+func (r *databaseResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) {
+ var data sqlserverflexbetaResGen.DatabaseModel
+
+ // Read Terraform prior state data into the model
+ resp.Diagnostics.Append(req.State.Get(ctx, &data)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ // Read identity data
+ var identityData DatabaseResourceIdentityModel
+ resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ ctx = core.InitProviderContext(ctx)
+
+ projectId := identityData.ProjectID.ValueString()
+ region := identityData.Region.ValueString()
+ ctx = tflog.SetField(ctx, "project_id", projectId)
+ ctx = tflog.SetField(ctx, "region", region)
+
+ // Todo: Update API call logic
+
+ // Save updated data into Terraform state
+ resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
+
+ tflog.Info(ctx, "sqlserverflexbeta.Database updated")
+}
+
+func (r *databaseResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) {
+ var data sqlserverflexbetaResGen.DatabaseModel
+
+ // Read Terraform prior state data into the model
+ resp.Diagnostics.Append(req.State.Get(ctx, &data)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ // Read identity data
+ var identityData DatabaseResourceIdentityModel
+ resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ ctx = core.InitProviderContext(ctx)
+
+ projectId := identityData.ProjectID.ValueString()
+ region := identityData.Region.ValueString()
+ ctx = tflog.SetField(ctx, "project_id", projectId)
+ ctx = tflog.SetField(ctx, "region", region)
+
+ // Todo: Delete API call logic
+
+ tflog.Info(ctx, "sqlserverflexbeta.Database deleted")
+}
+
+// ModifyPlan implements resource.ResourceWithModifyPlan.
+// Use the modifier to set the effective region in the current plan.
+func (r *databaseResource) ModifyPlan(
+ ctx context.Context,
+ req resource.ModifyPlanRequest,
+ resp *resource.ModifyPlanResponse,
+) { // nolint:gocritic // function signature required by Terraform
+ var configModel sqlserverflexbetaResGen.DatabaseModel
+ // skip initial empty configuration to avoid follow-up errors
+ if req.Config.Raw.IsNull() {
+ return
+ }
+ resp.Diagnostics.Append(req.Config.Get(ctx, &configModel)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ var planModel sqlserverflexbetaResGen.DatabaseModel
+ resp.Diagnostics.Append(req.Plan.Get(ctx, &planModel)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ utils.AdaptRegion(ctx, configModel.Region, &planModel.Region, r.providerData.GetRegion(), resp)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ var identityModel DatabaseResourceIdentityModel
+ identityModel.ProjectID = planModel.ProjectId
+ identityModel.Region = planModel.Region
+ // TODO: complete
+ //if !planModel.InstanceId.IsNull() && !planModel.InstanceId.IsUnknown() {
+ // identityModel.InstanceID = planModel.InstanceId
+ //}
+
+ resp.Diagnostics.Append(resp.Identity.Set(ctx, identityModel)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ resp.Diagnostics.Append(resp.Plan.Set(ctx, planModel)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+}
+
+// ImportState imports a resource into the Terraform state on success.
+// The expected format of the resource import identifier is: project_id,zone_id,record_set_id
+func (r *databaseResource) ImportState(
+ ctx context.Context,
+ req resource.ImportStateRequest,
+ resp *resource.ImportStateResponse,
+) {
+ idParts := strings.Split(req.ID, core.Separator)
+
+ // Todo: Import logic
+ if len(idParts) < 2 || idParts[0] == "" || idParts[1] == "" {
+ core.LogAndAddError(
+ ctx, &resp.Diagnostics,
+ "Error importing database",
+ fmt.Sprintf(
+ "Expected import identifier with format [project_id],[region],..., got %q",
+ req.ID,
+ ),
+ )
+ return
+ }
+
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), idParts[0])...)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), idParts[1])...)
+ // ... more ...
+
+ core.LogAndAddWarning(
+ ctx,
+ &resp.Diagnostics,
+ "Sqlserverflexbeta database imported with empty password",
+ "The database password is not imported as it is only available upon creation of a new database. The password field will be empty.",
+ )
+ tflog.Info(ctx, "Sqlserverflexbeta database state imported")
+}
diff --git a/stackit/internal/services/sqlserverflexbeta/database/resources_gen/database_resource_gen.go b/stackit/internal/services/sqlserverflexbeta/database/resources_gen/database_resource_gen.go
new file mode 100644
index 00000000..dccae0c4
--- /dev/null
+++ b/stackit/internal/services/sqlserverflexbeta/database/resources_gen/database_resource_gen.go
@@ -0,0 +1,99 @@
+// Code generated by terraform-plugin-framework-generator DO NOT EDIT.
+
+package sqlserverflexbeta
+
+import (
+ "context"
+ "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema"
+)
+
+func DatabaseResourceSchema(ctx context.Context) schema.Schema {
+ return schema.Schema{
+ Attributes: map[string]schema.Attribute{
+ "collation": schema.StringAttribute{
+ Optional: true,
+ Computed: true,
+ Description: "The collation of the database. This database collation should match the *collation_name* of one of the collations given by the **Get database collation list** endpoint.",
+ MarkdownDescription: "The collation of the database. This database collation should match the *collation_name* of one of the collations given by the **Get database collation list** endpoint.",
+ },
+ "collation_name": schema.StringAttribute{
+ Computed: true,
+ Description: "The collation of the database. This database collation should match the *collation_name* of one of the collations given by the **Get database collation list** endpoint.",
+ MarkdownDescription: "The collation of the database. This database collation should match the *collation_name* of one of the collations given by the **Get database collation list** endpoint.",
+ },
+ "compatibility": schema.Int64Attribute{
+ Optional: true,
+ Computed: true,
+ Description: "CompatibilityLevel of the Database.",
+ MarkdownDescription: "CompatibilityLevel of the Database.",
+ },
+ "compatibility_level": schema.Int64Attribute{
+ Computed: true,
+ Description: "CompatibilityLevel of the Database.",
+ MarkdownDescription: "CompatibilityLevel of the Database.",
+ },
+ "database_name": schema.StringAttribute{
+ Optional: true,
+ Computed: true,
+ Description: "The name of the database.",
+ MarkdownDescription: "The name of the database.",
+ },
+ "id": schema.Int64Attribute{
+ Computed: true,
+ Description: "The id of the database.",
+ MarkdownDescription: "The id of the database.",
+ },
+ "instance_id": schema.StringAttribute{
+ Optional: true,
+ Computed: true,
+ Description: "The ID of the instance.",
+ MarkdownDescription: "The ID of the instance.",
+ },
+ "name": schema.StringAttribute{
+ Required: true,
+ Description: "The name of the database.",
+ MarkdownDescription: "The name of the database.",
+ },
+ "owner": schema.StringAttribute{
+ Required: true,
+ Description: "The owner of the database.",
+ MarkdownDescription: "The owner of the database.",
+ },
+ "project_id": schema.StringAttribute{
+ Optional: true,
+ Computed: true,
+ Description: "The STACKIT project ID.",
+ MarkdownDescription: "The STACKIT project ID.",
+ },
+ "region": schema.StringAttribute{
+ Optional: true,
+ Computed: true,
+ Description: "The region which should be addressed",
+ MarkdownDescription: "The region which should be addressed",
+ Validators: []validator.String{
+ stringvalidator.OneOf(
+ "eu01",
+ ),
+ },
+ },
+ },
+ }
+}
+
+type DatabaseModel struct {
+ Collation types.String `tfsdk:"collation"`
+ CollationName types.String `tfsdk:"collation_name"`
+ Compatibility types.Int64 `tfsdk:"compatibility"`
+ CompatibilityLevel types.Int64 `tfsdk:"compatibility_level"`
+ DatabaseName types.String `tfsdk:"database_name"`
+ Id types.Int64 `tfsdk:"id"`
+ InstanceId types.String `tfsdk:"instance_id"`
+ Name types.String `tfsdk:"name"`
+ Owner types.String `tfsdk:"owner"`
+ ProjectId types.String `tfsdk:"project_id"`
+ Region types.String `tfsdk:"region"`
+}
diff --git a/stackit/internal/services/sqlserverflexbeta/flavor/datasource.go b/stackit/internal/services/sqlserverflexbeta/flavor/datasource.go
new file mode 100644
index 00000000..55f12a9a
--- /dev/null
+++ b/stackit/internal/services/sqlserverflexbeta/flavor/datasource.go
@@ -0,0 +1,335 @@
+package sqlserverFlexBetaFlavor
+
+import (
+ "context"
+ "fmt"
+
+ "github.com/hashicorp/terraform-plugin-framework/attr"
+ "github.com/hashicorp/terraform-plugin-framework/datasource"
+ "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+ "github.com/hashicorp/terraform-plugin-framework/types/basetypes"
+ "github.com/hashicorp/terraform-plugin-log/tflog"
+ "github.com/stackitcloud/stackit-sdk-go/core/config"
+
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
+
+ sqlserverflexbetaPkg "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta"
+ sqlserverflexbetaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/flavor/datasources_gen"
+)
+
+// Ensure the implementation satisfies the expected interfaces.
+var (
+ _ datasource.DataSource = &flavorDataSource{}
+ _ datasource.DataSourceWithConfigure = &flavorDataSource{}
+)
+
+type FlavorModel struct {
+ ProjectId types.String `tfsdk:"project_id"`
+ Region types.String `tfsdk:"region"`
+ StorageClass types.String `tfsdk:"storage_class"`
+ Cpu types.Int64 `tfsdk:"cpu"`
+ Description types.String `tfsdk:"description"`
+ Id types.String `tfsdk:"id"`
+ FlavorId types.String `tfsdk:"flavor_id"`
+ MaxGb types.Int64 `tfsdk:"max_gb"`
+ Memory types.Int64 `tfsdk:"ram"`
+ MinGb types.Int64 `tfsdk:"min_gb"`
+ NodeType types.String `tfsdk:"node_type"`
+ StorageClasses types.List `tfsdk:"storage_classes"`
+}
+
+// NewFlavorDataSource is a helper function to simplify the provider implementation.
+func NewFlavorDataSource() datasource.DataSource {
+ return &flavorDataSource{}
+}
+
+// flavorDataSource is the data source implementation.
+type flavorDataSource struct {
+ client *sqlserverflexbetaPkg.APIClient
+ providerData core.ProviderData
+}
+
+// Metadata returns the data source type name.
+func (r *flavorDataSource) Metadata(_ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
+ resp.TypeName = req.ProviderTypeName + "_sqlserverflexbeta_flavor"
+}
+
+// Configure adds the provider configured client to the data source.
+func (r *flavorDataSource) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) {
+ var ok bool
+ r.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics)
+ if !ok {
+ return
+ }
+
+ apiClientConfigOptions := []config.ConfigurationOption{
+ config.WithCustomAuth(r.providerData.RoundTripper),
+ utils.UserAgentConfigOption(r.providerData.Version),
+ }
+ if r.providerData.SQLServerFlexCustomEndpoint != "" {
+ apiClientConfigOptions = append(
+ apiClientConfigOptions,
+ config.WithEndpoint(r.providerData.SQLServerFlexCustomEndpoint),
+ )
+ } else {
+ apiClientConfigOptions = append(
+ apiClientConfigOptions,
+ config.WithRegion(r.providerData.GetRegion()),
+ )
+ }
+ apiClient, err := sqlserverflexbetaPkg.NewAPIClient(apiClientConfigOptions...)
+ if err != nil {
+ resp.Diagnostics.AddError(
+ "Error configuring API client",
+ fmt.Sprintf(
+ "Configuring client: %v. This is an error related to the provider configuration, not to the resource configuration",
+ err,
+ ),
+ )
+ return
+ }
+ r.client = apiClient
+ tflog.Info(ctx, "Postgres Flex instance client configured")
+}
+
+func (r *flavorDataSource) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) {
+ resp.Schema = schema.Schema{
+ Attributes: map[string]schema.Attribute{
+ "cpu": schema.Int64Attribute{
+ Computed: true,
+ Description: "The cpu count of the instance.",
+ MarkdownDescription: "The cpu count of the instance.",
+ },
+ "description": schema.StringAttribute{
+ Computed: true,
+ Description: "The flavor description.",
+ MarkdownDescription: "The flavor description.",
+ },
+ "id": schema.StringAttribute{
+ Computed: true,
+ Description: "The id of the instance flavor.",
+ MarkdownDescription: "The id of the instance flavor.",
+ },
+ "max_gb": schema.Int64Attribute{
+ Computed: true,
+ Description: "maximum storage which can be ordered for the flavor in Gigabyte.",
+ MarkdownDescription: "maximum storage which can be ordered for the flavor in Gigabyte.",
+ },
+ "memory": schema.Int64Attribute{
+ Computed: true,
+ Description: "The memory of the instance in Gibibyte.",
+ MarkdownDescription: "The memory of the instance in Gibibyte.",
+ },
+ "min_gb": schema.Int64Attribute{
+ Computed: true,
+ Description: "minimum storage which is required to order in Gigabyte.",
+ MarkdownDescription: "minimum storage which is required to order in Gigabyte.",
+ },
+ "node_type": schema.StringAttribute{
+ Computed: true,
+ Description: "defines the nodeType it can be either single or HA",
+ MarkdownDescription: "defines the nodeType it can be either single or HA",
+ },
+ "storage_classes": schema.ListNestedAttribute{
+ NestedObject: schema.NestedAttributeObject{
+ Attributes: map[string]schema.Attribute{
+ "class": schema.StringAttribute{
+ Computed: true,
+ },
+ "max_io_per_sec": schema.Int64Attribute{
+ Computed: true,
+ },
+ "max_through_in_mb": schema.Int64Attribute{
+ Computed: true,
+ },
+ },
+ CustomType: sqlserverflexbetaGen.StorageClassesType{
+ ObjectType: types.ObjectType{
+ AttrTypes: sqlserverflexbetaGen.StorageClassesValue{}.AttributeTypes(ctx),
+ },
+ },
+ },
+ Computed: true,
+ Description: "maximum storage which can be ordered for the flavor in Gigabyte.",
+ MarkdownDescription: "maximum storage which can be ordered for the flavor in Gigabyte.",
+ },
+ },
+ //Attributes: map[string]schema.Attribute{
+ // "project_id": schema.StringAttribute{
+ // Required: true,
+ // Description: "The cpu count of the instance.",
+ // MarkdownDescription: "The cpu count of the instance.",
+ // },
+ // "region": schema.StringAttribute{
+ // Required: true,
+ // Description: "The flavor description.",
+ // MarkdownDescription: "The flavor description.",
+ // },
+ // "cpu": schema.Int64Attribute{
+ // Required: true,
+ // Description: "The cpu count of the instance.",
+ // MarkdownDescription: "The cpu count of the instance.",
+ // },
+ // "ram": schema.Int64Attribute{
+ // Required: true,
+ // Description: "The memory of the instance in Gibibyte.",
+ // MarkdownDescription: "The memory of the instance in Gibibyte.",
+ // },
+ // "storage_class": schema.StringAttribute{
+ // Required: true,
+ // Description: "The memory of the instance in Gibibyte.",
+ // MarkdownDescription: "The memory of the instance in Gibibyte.",
+ // },
+ // "description": schema.StringAttribute{
+ // Computed: true,
+ // Description: "The flavor description.",
+ // MarkdownDescription: "The flavor description.",
+ // },
+ // "id": schema.StringAttribute{
+ // Computed: true,
+ // Description: "The terraform id of the instance flavor.",
+ // MarkdownDescription: "The terraform id of the instance flavor.",
+ // },
+ // "flavor_id": schema.StringAttribute{
+ // Computed: true,
+ // Description: "The flavor id of the instance flavor.",
+ // MarkdownDescription: "The flavor id of the instance flavor.",
+ // },
+ // "max_gb": schema.Int64Attribute{
+ // Computed: true,
+ // Description: "maximum storage which can be ordered for the flavor in Gigabyte.",
+ // MarkdownDescription: "maximum storage which can be ordered for the flavor in Gigabyte.",
+ // },
+ // "min_gb": schema.Int64Attribute{
+ // Computed: true,
+ // Description: "minimum storage which is required to order in Gigabyte.",
+ // MarkdownDescription: "minimum storage which is required to order in Gigabyte.",
+ // },
+ // "node_type": schema.StringAttribute{
+ // Required: true,
+ // Description: "defines the nodeType it can be either single or replica",
+ // MarkdownDescription: "defines the nodeType it can be either single or replica",
+ // },
+ // "storage_classes": schema.ListNestedAttribute{
+ // Computed: true,
+ // NestedObject: schema.NestedAttributeObject{
+ // Attributes: map[string]schema.Attribute{
+ // "class": schema.StringAttribute{
+ // Computed: true,
+ // },
+ // "max_io_per_sec": schema.Int64Attribute{
+ // Computed: true,
+ // },
+ // "max_through_in_mb": schema.Int64Attribute{
+ // Computed: true,
+ // },
+ // },
+ // CustomType: sqlserverflexalphaGen.StorageClassesType{
+ // ObjectType: types.ObjectType{
+ // AttrTypes: sqlserverflexalphaGen.StorageClassesValue{}.AttributeTypes(ctx),
+ // },
+ // },
+ // },
+ // },
+ //},
+ }
+}
+
+func (r *flavorDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
+ var model FlavorModel
+ diags := req.Config.Get(ctx, &model)
+ resp.Diagnostics.Append(diags...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ ctx = core.InitProviderContext(ctx)
+
+ projectId := model.ProjectId.ValueString()
+ region := r.providerData.GetRegionWithOverride(model.Region)
+ ctx = tflog.SetField(ctx, "project_id", projectId)
+ ctx = tflog.SetField(ctx, "region", region)
+
+ flavors, err := getAllFlavors(ctx, r.client, projectId, region)
+ if err != nil {
+ core.LogAndAddError(ctx, &resp.Diagnostics, "Error reading flavors", fmt.Sprintf("getAllFlavors: %v", err))
+ return
+ }
+
+ var foundFlavors []sqlserverflexbetaPkg.ListFlavors
+ for _, flavor := range flavors {
+ if model.Cpu.ValueInt64() != *flavor.Cpu {
+ continue
+ }
+ if model.Memory.ValueInt64() != *flavor.Memory {
+ continue
+ }
+ if model.NodeType.ValueString() != *flavor.NodeType {
+ continue
+ }
+ for _, sc := range *flavor.StorageClasses {
+ if model.StorageClass.ValueString() != *sc.Class {
+ continue
+ }
+ foundFlavors = append(foundFlavors, flavor)
+ }
+ }
+ if len(foundFlavors) == 0 {
+ resp.Diagnostics.AddError("get flavor", "could not find requested flavor")
+ return
+ }
+ if len(foundFlavors) > 1 {
+ resp.Diagnostics.AddError("get flavor", "found too many matching flavors")
+ return
+ }
+
+ f := foundFlavors[0]
+ model.Description = types.StringValue(*f.Description)
+ model.Id = utils.BuildInternalTerraformId(model.ProjectId.ValueString(), region, *f.Id)
+ model.FlavorId = types.StringValue(*f.Id)
+ model.MaxGb = types.Int64Value(*f.MaxGB)
+ model.MinGb = types.Int64Value(*f.MinGB)
+
+ if f.StorageClasses == nil {
+ model.StorageClasses = types.ListNull(sqlserverflexbetaGen.StorageClassesType{
+ ObjectType: basetypes.ObjectType{
+ AttrTypes: sqlserverflexbetaGen.StorageClassesValue{}.AttributeTypes(ctx),
+ },
+ })
+ } else {
+ var scList []attr.Value
+ for _, sc := range *f.StorageClasses {
+ scList = append(
+ scList,
+ sqlserverflexbetaGen.NewStorageClassesValueMust(
+ sqlserverflexbetaGen.StorageClassesValue{}.AttributeTypes(ctx),
+ map[string]attr.Value{
+ "class": types.StringValue(*sc.Class),
+ "max_io_per_sec": types.Int64Value(*sc.MaxIoPerSec),
+ "max_through_in_mb": types.Int64Value(*sc.MaxThroughInMb),
+ },
+ ),
+ )
+ }
+ storageClassesList := types.ListValueMust(
+ sqlserverflexbetaGen.StorageClassesType{
+ ObjectType: basetypes.ObjectType{
+ AttrTypes: sqlserverflexbetaGen.StorageClassesValue{}.AttributeTypes(ctx),
+ },
+ },
+ scList,
+ )
+ model.StorageClasses = storageClassesList
+ }
+
+ // Set refreshed state
+ diags = resp.State.Set(ctx, model)
+ resp.Diagnostics.Append(diags...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+ tflog.Info(ctx, "Postgres Flex flavors read")
+}
diff --git a/stackit/internal/services/sqlserverflexbeta/flavor/datasources_gen/flavor_data_source_gen.go b/stackit/internal/services/sqlserverflexbeta/flavor/datasources_gen/flavor_data_source_gen.go
new file mode 100644
index 00000000..a766197e
--- /dev/null
+++ b/stackit/internal/services/sqlserverflexbeta/flavor/datasources_gen/flavor_data_source_gen.go
@@ -0,0 +1,1909 @@
+// Code generated by terraform-plugin-framework-generator DO NOT EDIT.
+
+package sqlserverflexbeta
+
+import (
+ "context"
+ "fmt"
+ "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ "github.com/hashicorp/terraform-plugin-framework/attr"
+ "github.com/hashicorp/terraform-plugin-framework/diag"
+ "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+ "github.com/hashicorp/terraform-plugin-framework/types/basetypes"
+ "github.com/hashicorp/terraform-plugin-go/tftypes"
+ "strings"
+
+ "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
+)
+
+func FlavorDataSourceSchema(ctx context.Context) schema.Schema {
+ return schema.Schema{
+ Attributes: map[string]schema.Attribute{
+ "flavors": schema.ListNestedAttribute{
+ NestedObject: schema.NestedAttributeObject{
+ Attributes: map[string]schema.Attribute{
+ "cpu": schema.Int64Attribute{
+ Computed: true,
+ Description: "The cpu count of the instance.",
+ MarkdownDescription: "The cpu count of the instance.",
+ },
+ "description": schema.StringAttribute{
+ Computed: true,
+ Description: "The flavor description.",
+ MarkdownDescription: "The flavor description.",
+ },
+ "id": schema.StringAttribute{
+ Computed: true,
+ Description: "The id of the instance flavor.",
+ MarkdownDescription: "The id of the instance flavor.",
+ },
+ "max_gb": schema.Int64Attribute{
+ Computed: true,
+ Description: "maximum storage which can be ordered for the flavor in Gigabyte.",
+ MarkdownDescription: "maximum storage which can be ordered for the flavor in Gigabyte.",
+ },
+ "memory": schema.Int64Attribute{
+ Computed: true,
+ Description: "The memory of the instance in Gibibyte.",
+ MarkdownDescription: "The memory of the instance in Gibibyte.",
+ },
+ "min_gb": schema.Int64Attribute{
+ Computed: true,
+ Description: "minimum storage which is required to order in Gigabyte.",
+ MarkdownDescription: "minimum storage which is required to order in Gigabyte.",
+ },
+ "node_type": schema.StringAttribute{
+ Computed: true,
+ Description: "defines the nodeType it can be either single or HA",
+ MarkdownDescription: "defines the nodeType it can be either single or HA",
+ },
+ "storage_classes": schema.ListNestedAttribute{
+ NestedObject: schema.NestedAttributeObject{
+ Attributes: map[string]schema.Attribute{
+ "class": schema.StringAttribute{
+ Computed: true,
+ },
+ "max_io_per_sec": schema.Int64Attribute{
+ Computed: true,
+ },
+ "max_through_in_mb": schema.Int64Attribute{
+ Computed: true,
+ },
+ },
+ CustomType: StorageClassesType{
+ ObjectType: types.ObjectType{
+ AttrTypes: StorageClassesValue{}.AttributeTypes(ctx),
+ },
+ },
+ },
+ Computed: true,
+ Description: "maximum storage which can be ordered for the flavor in Gigabyte.",
+ MarkdownDescription: "maximum storage which can be ordered for the flavor in Gigabyte.",
+ },
+ },
+ CustomType: FlavorsType{
+ ObjectType: types.ObjectType{
+ AttrTypes: FlavorsValue{}.AttributeTypes(ctx),
+ },
+ },
+ },
+ Computed: true,
+ Description: "List of flavors available for the project.",
+ MarkdownDescription: "List of flavors available for the project.",
+ },
+ "page": schema.Int64Attribute{
+ Optional: true,
+ Computed: true,
+ Description: "Number of the page of items list to be returned.",
+ MarkdownDescription: "Number of the page of items list to be returned.",
+ },
+ "pagination": schema.SingleNestedAttribute{
+ Attributes: map[string]schema.Attribute{
+ "page": schema.Int64Attribute{
+ Computed: true,
+ },
+ "size": schema.Int64Attribute{
+ Computed: true,
+ },
+ "sort": schema.StringAttribute{
+ Computed: true,
+ },
+ "total_pages": schema.Int64Attribute{
+ Computed: true,
+ },
+ "total_rows": schema.Int64Attribute{
+ Computed: true,
+ },
+ },
+ CustomType: PaginationType{
+ ObjectType: types.ObjectType{
+ AttrTypes: PaginationValue{}.AttributeTypes(ctx),
+ },
+ },
+ Computed: true,
+ },
+ "project_id": schema.StringAttribute{
+ Required: true,
+ Description: "The STACKIT project ID.",
+ MarkdownDescription: "The STACKIT project ID.",
+ },
+ "region": schema.StringAttribute{
+ Required: true,
+ Description: "The region which should be addressed",
+ MarkdownDescription: "The region which should be addressed",
+ Validators: []validator.String{
+ stringvalidator.OneOf(
+ "eu01",
+ ),
+ },
+ },
+ "size": schema.Int64Attribute{
+ Optional: true,
+ Computed: true,
+ Description: "Number of items to be returned on each page.",
+ MarkdownDescription: "Number of items to be returned on each page.",
+ },
+ "sort": schema.StringAttribute{
+ Optional: true,
+ Computed: true,
+ Description: "Sorting of the flavors to be returned on each page.",
+ MarkdownDescription: "Sorting of the flavors to be returned on each page.",
+ Validators: []validator.String{
+ stringvalidator.OneOf(
+ "index.desc",
+ "index.asc",
+ "cpu.desc",
+ "cpu.asc",
+ "flavor_description.asc",
+ "flavor_description.desc",
+ "id.desc",
+ "id.asc",
+ "size_max.desc",
+ "size_max.asc",
+ "ram.desc",
+ "ram.asc",
+ "size_min.desc",
+ "size_min.asc",
+ "storage_class.asc",
+ "storage_class.desc",
+ "node_type.asc",
+ "node_type.desc",
+ ),
+ },
+ },
+ },
+ }
+}
+
+type FlavorModel struct {
+ Flavors types.List `tfsdk:"flavors"`
+ Page types.Int64 `tfsdk:"page"`
+ Pagination PaginationValue `tfsdk:"pagination"`
+ ProjectId types.String `tfsdk:"project_id"`
+ Region types.String `tfsdk:"region"`
+ Size types.Int64 `tfsdk:"size"`
+ Sort types.String `tfsdk:"sort"`
+}
+
+var _ basetypes.ObjectTypable = FlavorsType{}
+
+type FlavorsType struct {
+ basetypes.ObjectType
+}
+
+func (t FlavorsType) Equal(o attr.Type) bool {
+ other, ok := o.(FlavorsType)
+
+ if !ok {
+ return false
+ }
+
+ return t.ObjectType.Equal(other.ObjectType)
+}
+
+func (t FlavorsType) String() string {
+ return "FlavorsType"
+}
+
+func (t FlavorsType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ attributes := in.Attributes()
+
+ cpuAttribute, ok := attributes["cpu"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `cpu is missing from object`)
+
+ return nil, diags
+ }
+
+ cpuVal, ok := cpuAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`cpu expected to be basetypes.Int64Value, was: %T`, cpuAttribute))
+ }
+
+ descriptionAttribute, ok := attributes["description"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `description is missing from object`)
+
+ return nil, diags
+ }
+
+ descriptionVal, ok := descriptionAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`description expected to be basetypes.StringValue, was: %T`, descriptionAttribute))
+ }
+
+ idAttribute, ok := attributes["id"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `id is missing from object`)
+
+ return nil, diags
+ }
+
+ idVal, ok := idAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`id expected to be basetypes.StringValue, was: %T`, idAttribute))
+ }
+
+ maxGbAttribute, ok := attributes["max_gb"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `max_gb is missing from object`)
+
+ return nil, diags
+ }
+
+ maxGbVal, ok := maxGbAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`max_gb expected to be basetypes.Int64Value, was: %T`, maxGbAttribute))
+ }
+
+ memoryAttribute, ok := attributes["memory"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `memory is missing from object`)
+
+ return nil, diags
+ }
+
+ memoryVal, ok := memoryAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`memory expected to be basetypes.Int64Value, was: %T`, memoryAttribute))
+ }
+
+ minGbAttribute, ok := attributes["min_gb"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `min_gb is missing from object`)
+
+ return nil, diags
+ }
+
+ minGbVal, ok := minGbAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`min_gb expected to be basetypes.Int64Value, was: %T`, minGbAttribute))
+ }
+
+ nodeTypeAttribute, ok := attributes["node_type"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `node_type is missing from object`)
+
+ return nil, diags
+ }
+
+ nodeTypeVal, ok := nodeTypeAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`node_type expected to be basetypes.StringValue, was: %T`, nodeTypeAttribute))
+ }
+
+ storageClassesAttribute, ok := attributes["storage_classes"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `storage_classes is missing from object`)
+
+ return nil, diags
+ }
+
+ storageClassesVal, ok := storageClassesAttribute.(basetypes.ListValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`storage_classes expected to be basetypes.ListValue, was: %T`, storageClassesAttribute))
+ }
+
+ if diags.HasError() {
+ return nil, diags
+ }
+
+ return FlavorsValue{
+ Cpu: cpuVal,
+ Description: descriptionVal,
+ Id: idVal,
+ MaxGb: maxGbVal,
+ Memory: memoryVal,
+ MinGb: minGbVal,
+ NodeType: nodeTypeVal,
+ StorageClasses: storageClassesVal,
+ state: attr.ValueStateKnown,
+ }, diags
+}
+
+func NewFlavorsValueNull() FlavorsValue {
+ return FlavorsValue{
+ state: attr.ValueStateNull,
+ }
+}
+
+func NewFlavorsValueUnknown() FlavorsValue {
+ return FlavorsValue{
+ state: attr.ValueStateUnknown,
+ }
+}
+
+func NewFlavorsValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (FlavorsValue, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521
+ ctx := context.Background()
+
+ for name, attributeType := range attributeTypes {
+ attribute, ok := attributes[name]
+
+ if !ok {
+ diags.AddError(
+ "Missing FlavorsValue Attribute Value",
+ "While creating a FlavorsValue value, a missing attribute value was detected. "+
+ "A FlavorsValue must contain values for all attributes, even if null or unknown. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("FlavorsValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()),
+ )
+
+ continue
+ }
+
+ if !attributeType.Equal(attribute.Type(ctx)) {
+ diags.AddError(
+ "Invalid FlavorsValue Attribute Type",
+ "While creating a FlavorsValue value, an invalid attribute value was detected. "+
+ "A FlavorsValue must use a matching attribute type for the value. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("FlavorsValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+
+ fmt.Sprintf("FlavorsValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)),
+ )
+ }
+ }
+
+ for name := range attributes {
+ _, ok := attributeTypes[name]
+
+ if !ok {
+ diags.AddError(
+ "Extra FlavorsValue Attribute Value",
+ "While creating a FlavorsValue value, an extra attribute value was detected. "+
+ "A FlavorsValue must not contain values beyond the expected attribute types. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("Extra FlavorsValue Attribute Name: %s", name),
+ )
+ }
+ }
+
+ if diags.HasError() {
+ return NewFlavorsValueUnknown(), diags
+ }
+
+ cpuAttribute, ok := attributes["cpu"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `cpu is missing from object`)
+
+ return NewFlavorsValueUnknown(), diags
+ }
+
+ cpuVal, ok := cpuAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`cpu expected to be basetypes.Int64Value, was: %T`, cpuAttribute))
+ }
+
+ descriptionAttribute, ok := attributes["description"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `description is missing from object`)
+
+ return NewFlavorsValueUnknown(), diags
+ }
+
+ descriptionVal, ok := descriptionAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`description expected to be basetypes.StringValue, was: %T`, descriptionAttribute))
+ }
+
+ idAttribute, ok := attributes["id"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `id is missing from object`)
+
+ return NewFlavorsValueUnknown(), diags
+ }
+
+ idVal, ok := idAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`id expected to be basetypes.StringValue, was: %T`, idAttribute))
+ }
+
+ maxGbAttribute, ok := attributes["max_gb"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `max_gb is missing from object`)
+
+ return NewFlavorsValueUnknown(), diags
+ }
+
+ maxGbVal, ok := maxGbAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`max_gb expected to be basetypes.Int64Value, was: %T`, maxGbAttribute))
+ }
+
+ memoryAttribute, ok := attributes["memory"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `memory is missing from object`)
+
+ return NewFlavorsValueUnknown(), diags
+ }
+
+ memoryVal, ok := memoryAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`memory expected to be basetypes.Int64Value, was: %T`, memoryAttribute))
+ }
+
+ minGbAttribute, ok := attributes["min_gb"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `min_gb is missing from object`)
+
+ return NewFlavorsValueUnknown(), diags
+ }
+
+ minGbVal, ok := minGbAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`min_gb expected to be basetypes.Int64Value, was: %T`, minGbAttribute))
+ }
+
+ nodeTypeAttribute, ok := attributes["node_type"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `node_type is missing from object`)
+
+ return NewFlavorsValueUnknown(), diags
+ }
+
+ nodeTypeVal, ok := nodeTypeAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`node_type expected to be basetypes.StringValue, was: %T`, nodeTypeAttribute))
+ }
+
+ storageClassesAttribute, ok := attributes["storage_classes"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `storage_classes is missing from object`)
+
+ return NewFlavorsValueUnknown(), diags
+ }
+
+ storageClassesVal, ok := storageClassesAttribute.(basetypes.ListValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`storage_classes expected to be basetypes.ListValue, was: %T`, storageClassesAttribute))
+ }
+
+ if diags.HasError() {
+ return NewFlavorsValueUnknown(), diags
+ }
+
+ return FlavorsValue{
+ Cpu: cpuVal,
+ Description: descriptionVal,
+ Id: idVal,
+ MaxGb: maxGbVal,
+ Memory: memoryVal,
+ MinGb: minGbVal,
+ NodeType: nodeTypeVal,
+ StorageClasses: storageClassesVal,
+ state: attr.ValueStateKnown,
+ }, diags
+}
+
+func NewFlavorsValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) FlavorsValue {
+ object, diags := NewFlavorsValue(attributeTypes, attributes)
+
+ if diags.HasError() {
+ // This could potentially be added to the diag package.
+ diagsStrings := make([]string, 0, len(diags))
+
+ for _, diagnostic := range diags {
+ diagsStrings = append(diagsStrings, fmt.Sprintf(
+ "%s | %s | %s",
+ diagnostic.Severity(),
+ diagnostic.Summary(),
+ diagnostic.Detail()))
+ }
+
+ panic("NewFlavorsValueMust received error(s): " + strings.Join(diagsStrings, "\n"))
+ }
+
+ return object
+}
+
+func (t FlavorsType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) {
+ if in.Type() == nil {
+ return NewFlavorsValueNull(), nil
+ }
+
+ if !in.Type().Equal(t.TerraformType(ctx)) {
+ return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type())
+ }
+
+ if !in.IsKnown() {
+ return NewFlavorsValueUnknown(), nil
+ }
+
+ if in.IsNull() {
+ return NewFlavorsValueNull(), nil
+ }
+
+ attributes := map[string]attr.Value{}
+
+ val := map[string]tftypes.Value{}
+
+ err := in.As(&val)
+
+ if err != nil {
+ return nil, err
+ }
+
+ for k, v := range val {
+ a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v)
+
+ if err != nil {
+ return nil, err
+ }
+
+ attributes[k] = a
+ }
+
+ return NewFlavorsValueMust(FlavorsValue{}.AttributeTypes(ctx), attributes), nil
+}
+
+func (t FlavorsType) ValueType(ctx context.Context) attr.Value {
+ return FlavorsValue{}
+}
+
+var _ basetypes.ObjectValuable = FlavorsValue{}
+
+type FlavorsValue struct {
+ Cpu basetypes.Int64Value `tfsdk:"cpu"`
+ Description basetypes.StringValue `tfsdk:"description"`
+ Id basetypes.StringValue `tfsdk:"id"`
+ MaxGb basetypes.Int64Value `tfsdk:"max_gb"`
+ Memory basetypes.Int64Value `tfsdk:"memory"`
+ MinGb basetypes.Int64Value `tfsdk:"min_gb"`
+ NodeType basetypes.StringValue `tfsdk:"node_type"`
+ StorageClasses basetypes.ListValue `tfsdk:"storage_classes"`
+ state attr.ValueState
+}
+
+func (v FlavorsValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
+ attrTypes := make(map[string]tftypes.Type, 8)
+
+ var val tftypes.Value
+ var err error
+
+ attrTypes["cpu"] = basetypes.Int64Type{}.TerraformType(ctx)
+ attrTypes["description"] = basetypes.StringType{}.TerraformType(ctx)
+ attrTypes["id"] = basetypes.StringType{}.TerraformType(ctx)
+ attrTypes["max_gb"] = basetypes.Int64Type{}.TerraformType(ctx)
+ attrTypes["memory"] = basetypes.Int64Type{}.TerraformType(ctx)
+ attrTypes["min_gb"] = basetypes.Int64Type{}.TerraformType(ctx)
+ attrTypes["node_type"] = basetypes.StringType{}.TerraformType(ctx)
+ attrTypes["storage_classes"] = basetypes.ListType{
+ ElemType: StorageClassesValue{}.Type(ctx),
+ }.TerraformType(ctx)
+
+ objectType := tftypes.Object{AttributeTypes: attrTypes}
+
+ switch v.state {
+ case attr.ValueStateKnown:
+ vals := make(map[string]tftypes.Value, 8)
+
+ val, err = v.Cpu.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["cpu"] = val
+
+ val, err = v.Description.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["description"] = val
+
+ val, err = v.Id.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["id"] = val
+
+ val, err = v.MaxGb.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["max_gb"] = val
+
+ val, err = v.Memory.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["memory"] = val
+
+ val, err = v.MinGb.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["min_gb"] = val
+
+ val, err = v.NodeType.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["node_type"] = val
+
+ val, err = v.StorageClasses.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["storage_classes"] = val
+
+ if err := tftypes.ValidateValue(objectType, vals); err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ return tftypes.NewValue(objectType, vals), nil
+ case attr.ValueStateNull:
+ return tftypes.NewValue(objectType, nil), nil
+ case attr.ValueStateUnknown:
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), nil
+ default:
+ panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state))
+ }
+}
+
+func (v FlavorsValue) IsNull() bool {
+ return v.state == attr.ValueStateNull
+}
+
+func (v FlavorsValue) IsUnknown() bool {
+ return v.state == attr.ValueStateUnknown
+}
+
+func (v FlavorsValue) String() string {
+ return "FlavorsValue"
+}
+
+func (v FlavorsValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ storageClasses := types.ListValueMust(
+ StorageClassesType{
+ basetypes.ObjectType{
+ AttrTypes: StorageClassesValue{}.AttributeTypes(ctx),
+ },
+ },
+ v.StorageClasses.Elements(),
+ )
+
+ if v.StorageClasses.IsNull() {
+ storageClasses = types.ListNull(
+ StorageClassesType{
+ basetypes.ObjectType{
+ AttrTypes: StorageClassesValue{}.AttributeTypes(ctx),
+ },
+ },
+ )
+ }
+
+ if v.StorageClasses.IsUnknown() {
+ storageClasses = types.ListUnknown(
+ StorageClassesType{
+ basetypes.ObjectType{
+ AttrTypes: StorageClassesValue{}.AttributeTypes(ctx),
+ },
+ },
+ )
+ }
+
+ attributeTypes := map[string]attr.Type{
+ "cpu": basetypes.Int64Type{},
+ "description": basetypes.StringType{},
+ "id": basetypes.StringType{},
+ "max_gb": basetypes.Int64Type{},
+ "memory": basetypes.Int64Type{},
+ "min_gb": basetypes.Int64Type{},
+ "node_type": basetypes.StringType{},
+ "storage_classes": basetypes.ListType{
+ ElemType: StorageClassesValue{}.Type(ctx),
+ },
+ }
+
+ if v.IsNull() {
+ return types.ObjectNull(attributeTypes), diags
+ }
+
+ if v.IsUnknown() {
+ return types.ObjectUnknown(attributeTypes), diags
+ }
+
+ objVal, diags := types.ObjectValue(
+ attributeTypes,
+ map[string]attr.Value{
+ "cpu": v.Cpu,
+ "description": v.Description,
+ "id": v.Id,
+ "max_gb": v.MaxGb,
+ "memory": v.Memory,
+ "min_gb": v.MinGb,
+ "node_type": v.NodeType,
+ "storage_classes": storageClasses,
+ })
+
+ return objVal, diags
+}
+
+func (v FlavorsValue) Equal(o attr.Value) bool {
+ other, ok := o.(FlavorsValue)
+
+ if !ok {
+ return false
+ }
+
+ if v.state != other.state {
+ return false
+ }
+
+ if v.state != attr.ValueStateKnown {
+ return true
+ }
+
+ if !v.Cpu.Equal(other.Cpu) {
+ return false
+ }
+
+ if !v.Description.Equal(other.Description) {
+ return false
+ }
+
+ if !v.Id.Equal(other.Id) {
+ return false
+ }
+
+ if !v.MaxGb.Equal(other.MaxGb) {
+ return false
+ }
+
+ if !v.Memory.Equal(other.Memory) {
+ return false
+ }
+
+ if !v.MinGb.Equal(other.MinGb) {
+ return false
+ }
+
+ if !v.NodeType.Equal(other.NodeType) {
+ return false
+ }
+
+ if !v.StorageClasses.Equal(other.StorageClasses) {
+ return false
+ }
+
+ return true
+}
+
+func (v FlavorsValue) Type(ctx context.Context) attr.Type {
+ return FlavorsType{
+ basetypes.ObjectType{
+ AttrTypes: v.AttributeTypes(ctx),
+ },
+ }
+}
+
+func (v FlavorsValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
+ return map[string]attr.Type{
+ "cpu": basetypes.Int64Type{},
+ "description": basetypes.StringType{},
+ "id": basetypes.StringType{},
+ "max_gb": basetypes.Int64Type{},
+ "memory": basetypes.Int64Type{},
+ "min_gb": basetypes.Int64Type{},
+ "node_type": basetypes.StringType{},
+ "storage_classes": basetypes.ListType{
+ ElemType: StorageClassesValue{}.Type(ctx),
+ },
+ }
+}
+
+var _ basetypes.ObjectTypable = StorageClassesType{}
+
+type StorageClassesType struct {
+ basetypes.ObjectType
+}
+
+func (t StorageClassesType) Equal(o attr.Type) bool {
+ other, ok := o.(StorageClassesType)
+
+ if !ok {
+ return false
+ }
+
+ return t.ObjectType.Equal(other.ObjectType)
+}
+
+func (t StorageClassesType) String() string {
+ return "StorageClassesType"
+}
+
+func (t StorageClassesType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ attributes := in.Attributes()
+
+ classAttribute, ok := attributes["class"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `class is missing from object`)
+
+ return nil, diags
+ }
+
+ classVal, ok := classAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`class expected to be basetypes.StringValue, was: %T`, classAttribute))
+ }
+
+ maxIoPerSecAttribute, ok := attributes["max_io_per_sec"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `max_io_per_sec is missing from object`)
+
+ return nil, diags
+ }
+
+ maxIoPerSecVal, ok := maxIoPerSecAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`max_io_per_sec expected to be basetypes.Int64Value, was: %T`, maxIoPerSecAttribute))
+ }
+
+ maxThroughInMbAttribute, ok := attributes["max_through_in_mb"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `max_through_in_mb is missing from object`)
+
+ return nil, diags
+ }
+
+ maxThroughInMbVal, ok := maxThroughInMbAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`max_through_in_mb expected to be basetypes.Int64Value, was: %T`, maxThroughInMbAttribute))
+ }
+
+ if diags.HasError() {
+ return nil, diags
+ }
+
+ return StorageClassesValue{
+ Class: classVal,
+ MaxIoPerSec: maxIoPerSecVal,
+ MaxThroughInMb: maxThroughInMbVal,
+ state: attr.ValueStateKnown,
+ }, diags
+}
+
+func NewStorageClassesValueNull() StorageClassesValue {
+ return StorageClassesValue{
+ state: attr.ValueStateNull,
+ }
+}
+
+func NewStorageClassesValueUnknown() StorageClassesValue {
+ return StorageClassesValue{
+ state: attr.ValueStateUnknown,
+ }
+}
+
+func NewStorageClassesValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (StorageClassesValue, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521
+ ctx := context.Background()
+
+ for name, attributeType := range attributeTypes {
+ attribute, ok := attributes[name]
+
+ if !ok {
+ diags.AddError(
+ "Missing StorageClassesValue Attribute Value",
+ "While creating a StorageClassesValue value, a missing attribute value was detected. "+
+ "A StorageClassesValue must contain values for all attributes, even if null or unknown. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("StorageClassesValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()),
+ )
+
+ continue
+ }
+
+ if !attributeType.Equal(attribute.Type(ctx)) {
+ diags.AddError(
+ "Invalid StorageClassesValue Attribute Type",
+ "While creating a StorageClassesValue value, an invalid attribute value was detected. "+
+ "A StorageClassesValue must use a matching attribute type for the value. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("StorageClassesValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+
+ fmt.Sprintf("StorageClassesValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)),
+ )
+ }
+ }
+
+ for name := range attributes {
+ _, ok := attributeTypes[name]
+
+ if !ok {
+ diags.AddError(
+ "Extra StorageClassesValue Attribute Value",
+ "While creating a StorageClassesValue value, an extra attribute value was detected. "+
+ "A StorageClassesValue must not contain values beyond the expected attribute types. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("Extra StorageClassesValue Attribute Name: %s", name),
+ )
+ }
+ }
+
+ if diags.HasError() {
+ return NewStorageClassesValueUnknown(), diags
+ }
+
+ classAttribute, ok := attributes["class"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `class is missing from object`)
+
+ return NewStorageClassesValueUnknown(), diags
+ }
+
+ classVal, ok := classAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`class expected to be basetypes.StringValue, was: %T`, classAttribute))
+ }
+
+ maxIoPerSecAttribute, ok := attributes["max_io_per_sec"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `max_io_per_sec is missing from object`)
+
+ return NewStorageClassesValueUnknown(), diags
+ }
+
+ maxIoPerSecVal, ok := maxIoPerSecAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`max_io_per_sec expected to be basetypes.Int64Value, was: %T`, maxIoPerSecAttribute))
+ }
+
+ maxThroughInMbAttribute, ok := attributes["max_through_in_mb"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `max_through_in_mb is missing from object`)
+
+ return NewStorageClassesValueUnknown(), diags
+ }
+
+ maxThroughInMbVal, ok := maxThroughInMbAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`max_through_in_mb expected to be basetypes.Int64Value, was: %T`, maxThroughInMbAttribute))
+ }
+
+ if diags.HasError() {
+ return NewStorageClassesValueUnknown(), diags
+ }
+
+ return StorageClassesValue{
+ Class: classVal,
+ MaxIoPerSec: maxIoPerSecVal,
+ MaxThroughInMb: maxThroughInMbVal,
+ state: attr.ValueStateKnown,
+ }, diags
+}
+
+func NewStorageClassesValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) StorageClassesValue {
+ object, diags := NewStorageClassesValue(attributeTypes, attributes)
+
+ if diags.HasError() {
+ // This could potentially be added to the diag package.
+ diagsStrings := make([]string, 0, len(diags))
+
+ for _, diagnostic := range diags {
+ diagsStrings = append(diagsStrings, fmt.Sprintf(
+ "%s | %s | %s",
+ diagnostic.Severity(),
+ diagnostic.Summary(),
+ diagnostic.Detail()))
+ }
+
+ panic("NewStorageClassesValueMust received error(s): " + strings.Join(diagsStrings, "\n"))
+ }
+
+ return object
+}
+
+func (t StorageClassesType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) {
+ if in.Type() == nil {
+ return NewStorageClassesValueNull(), nil
+ }
+
+ if !in.Type().Equal(t.TerraformType(ctx)) {
+ return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type())
+ }
+
+ if !in.IsKnown() {
+ return NewStorageClassesValueUnknown(), nil
+ }
+
+ if in.IsNull() {
+ return NewStorageClassesValueNull(), nil
+ }
+
+ attributes := map[string]attr.Value{}
+
+ val := map[string]tftypes.Value{}
+
+ err := in.As(&val)
+
+ if err != nil {
+ return nil, err
+ }
+
+ for k, v := range val {
+ a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v)
+
+ if err != nil {
+ return nil, err
+ }
+
+ attributes[k] = a
+ }
+
+ return NewStorageClassesValueMust(StorageClassesValue{}.AttributeTypes(ctx), attributes), nil
+}
+
+func (t StorageClassesType) ValueType(ctx context.Context) attr.Value {
+ return StorageClassesValue{}
+}
+
+var _ basetypes.ObjectValuable = StorageClassesValue{}
+
+type StorageClassesValue struct {
+ Class basetypes.StringValue `tfsdk:"class"`
+ MaxIoPerSec basetypes.Int64Value `tfsdk:"max_io_per_sec"`
+ MaxThroughInMb basetypes.Int64Value `tfsdk:"max_through_in_mb"`
+ state attr.ValueState
+}
+
+func (v StorageClassesValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
+ attrTypes := make(map[string]tftypes.Type, 3)
+
+ var val tftypes.Value
+ var err error
+
+ attrTypes["class"] = basetypes.StringType{}.TerraformType(ctx)
+ attrTypes["max_io_per_sec"] = basetypes.Int64Type{}.TerraformType(ctx)
+ attrTypes["max_through_in_mb"] = basetypes.Int64Type{}.TerraformType(ctx)
+
+ objectType := tftypes.Object{AttributeTypes: attrTypes}
+
+ switch v.state {
+ case attr.ValueStateKnown:
+ vals := make(map[string]tftypes.Value, 3)
+
+ val, err = v.Class.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["class"] = val
+
+ val, err = v.MaxIoPerSec.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["max_io_per_sec"] = val
+
+ val, err = v.MaxThroughInMb.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["max_through_in_mb"] = val
+
+ if err := tftypes.ValidateValue(objectType, vals); err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ return tftypes.NewValue(objectType, vals), nil
+ case attr.ValueStateNull:
+ return tftypes.NewValue(objectType, nil), nil
+ case attr.ValueStateUnknown:
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), nil
+ default:
+ panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state))
+ }
+}
+
+func (v StorageClassesValue) IsNull() bool {
+ return v.state == attr.ValueStateNull
+}
+
+func (v StorageClassesValue) IsUnknown() bool {
+ return v.state == attr.ValueStateUnknown
+}
+
+func (v StorageClassesValue) String() string {
+ return "StorageClassesValue"
+}
+
+func (v StorageClassesValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ attributeTypes := map[string]attr.Type{
+ "class": basetypes.StringType{},
+ "max_io_per_sec": basetypes.Int64Type{},
+ "max_through_in_mb": basetypes.Int64Type{},
+ }
+
+ if v.IsNull() {
+ return types.ObjectNull(attributeTypes), diags
+ }
+
+ if v.IsUnknown() {
+ return types.ObjectUnknown(attributeTypes), diags
+ }
+
+ objVal, diags := types.ObjectValue(
+ attributeTypes,
+ map[string]attr.Value{
+ "class": v.Class,
+ "max_io_per_sec": v.MaxIoPerSec,
+ "max_through_in_mb": v.MaxThroughInMb,
+ })
+
+ return objVal, diags
+}
+
+func (v StorageClassesValue) Equal(o attr.Value) bool {
+ other, ok := o.(StorageClassesValue)
+
+ if !ok {
+ return false
+ }
+
+ if v.state != other.state {
+ return false
+ }
+
+ if v.state != attr.ValueStateKnown {
+ return true
+ }
+
+ if !v.Class.Equal(other.Class) {
+ return false
+ }
+
+ if !v.MaxIoPerSec.Equal(other.MaxIoPerSec) {
+ return false
+ }
+
+ if !v.MaxThroughInMb.Equal(other.MaxThroughInMb) {
+ return false
+ }
+
+ return true
+}
+
+func (v StorageClassesValue) Type(ctx context.Context) attr.Type {
+ return StorageClassesType{
+ basetypes.ObjectType{
+ AttrTypes: v.AttributeTypes(ctx),
+ },
+ }
+}
+
+func (v StorageClassesValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
+ return map[string]attr.Type{
+ "class": basetypes.StringType{},
+ "max_io_per_sec": basetypes.Int64Type{},
+ "max_through_in_mb": basetypes.Int64Type{},
+ }
+}
+
+var _ basetypes.ObjectTypable = PaginationType{}
+
+type PaginationType struct {
+ basetypes.ObjectType
+}
+
+func (t PaginationType) Equal(o attr.Type) bool {
+ other, ok := o.(PaginationType)
+
+ if !ok {
+ return false
+ }
+
+ return t.ObjectType.Equal(other.ObjectType)
+}
+
+func (t PaginationType) String() string {
+ return "PaginationType"
+}
+
+func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ attributes := in.Attributes()
+
+ pageAttribute, ok := attributes["page"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `page is missing from object`)
+
+ return nil, diags
+ }
+
+ pageVal, ok := pageAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute))
+ }
+
+ sizeAttribute, ok := attributes["size"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `size is missing from object`)
+
+ return nil, diags
+ }
+
+ sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
+ }
+
+ sortAttribute, ok := attributes["sort"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `sort is missing from object`)
+
+ return nil, diags
+ }
+
+ sortVal, ok := sortAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`sort expected to be basetypes.StringValue, was: %T`, sortAttribute))
+ }
+
+ totalPagesAttribute, ok := attributes["total_pages"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `total_pages is missing from object`)
+
+ return nil, diags
+ }
+
+ totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute))
+ }
+
+ totalRowsAttribute, ok := attributes["total_rows"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `total_rows is missing from object`)
+
+ return nil, diags
+ }
+
+ totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute))
+ }
+
+ if diags.HasError() {
+ return nil, diags
+ }
+
+ return PaginationValue{
+ Page: pageVal,
+ Size: sizeVal,
+ Sort: sortVal,
+ TotalPages: totalPagesVal,
+ TotalRows: totalRowsVal,
+ state: attr.ValueStateKnown,
+ }, diags
+}
+
+func NewPaginationValueNull() PaginationValue {
+ return PaginationValue{
+ state: attr.ValueStateNull,
+ }
+}
+
+func NewPaginationValueUnknown() PaginationValue {
+ return PaginationValue{
+ state: attr.ValueStateUnknown,
+ }
+}
+
+func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (PaginationValue, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521
+ ctx := context.Background()
+
+ for name, attributeType := range attributeTypes {
+ attribute, ok := attributes[name]
+
+ if !ok {
+ diags.AddError(
+ "Missing PaginationValue Attribute Value",
+ "While creating a PaginationValue value, a missing attribute value was detected. "+
+ "A PaginationValue must contain values for all attributes, even if null or unknown. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("PaginationValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()),
+ )
+
+ continue
+ }
+
+ if !attributeType.Equal(attribute.Type(ctx)) {
+ diags.AddError(
+ "Invalid PaginationValue Attribute Type",
+ "While creating a PaginationValue value, an invalid attribute value was detected. "+
+ "A PaginationValue must use a matching attribute type for the value. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("PaginationValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+
+ fmt.Sprintf("PaginationValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)),
+ )
+ }
+ }
+
+ for name := range attributes {
+ _, ok := attributeTypes[name]
+
+ if !ok {
+ diags.AddError(
+ "Extra PaginationValue Attribute Value",
+ "While creating a PaginationValue value, an extra attribute value was detected. "+
+ "A PaginationValue must not contain values beyond the expected attribute types. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("Extra PaginationValue Attribute Name: %s", name),
+ )
+ }
+ }
+
+ if diags.HasError() {
+ return NewPaginationValueUnknown(), diags
+ }
+
+ pageAttribute, ok := attributes["page"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `page is missing from object`)
+
+ return NewPaginationValueUnknown(), diags
+ }
+
+ pageVal, ok := pageAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute))
+ }
+
+ sizeAttribute, ok := attributes["size"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `size is missing from object`)
+
+ return NewPaginationValueUnknown(), diags
+ }
+
+ sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
+ }
+
+ sortAttribute, ok := attributes["sort"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `sort is missing from object`)
+
+ return NewPaginationValueUnknown(), diags
+ }
+
+ sortVal, ok := sortAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`sort expected to be basetypes.StringValue, was: %T`, sortAttribute))
+ }
+
+ totalPagesAttribute, ok := attributes["total_pages"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `total_pages is missing from object`)
+
+ return NewPaginationValueUnknown(), diags
+ }
+
+ totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute))
+ }
+
+ totalRowsAttribute, ok := attributes["total_rows"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `total_rows is missing from object`)
+
+ return NewPaginationValueUnknown(), diags
+ }
+
+ totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute))
+ }
+
+ if diags.HasError() {
+ return NewPaginationValueUnknown(), diags
+ }
+
+ return PaginationValue{
+ Page: pageVal,
+ Size: sizeVal,
+ Sort: sortVal,
+ TotalPages: totalPagesVal,
+ TotalRows: totalRowsVal,
+ state: attr.ValueStateKnown,
+ }, diags
+}
+
+func NewPaginationValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) PaginationValue {
+ object, diags := NewPaginationValue(attributeTypes, attributes)
+
+ if diags.HasError() {
+ // This could potentially be added to the diag package.
+ diagsStrings := make([]string, 0, len(diags))
+
+ for _, diagnostic := range diags {
+ diagsStrings = append(diagsStrings, fmt.Sprintf(
+ "%s | %s | %s",
+ diagnostic.Severity(),
+ diagnostic.Summary(),
+ diagnostic.Detail()))
+ }
+
+ panic("NewPaginationValueMust received error(s): " + strings.Join(diagsStrings, "\n"))
+ }
+
+ return object
+}
+
+func (t PaginationType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) {
+ if in.Type() == nil {
+ return NewPaginationValueNull(), nil
+ }
+
+ if !in.Type().Equal(t.TerraformType(ctx)) {
+ return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type())
+ }
+
+ if !in.IsKnown() {
+ return NewPaginationValueUnknown(), nil
+ }
+
+ if in.IsNull() {
+ return NewPaginationValueNull(), nil
+ }
+
+ attributes := map[string]attr.Value{}
+
+ val := map[string]tftypes.Value{}
+
+ err := in.As(&val)
+
+ if err != nil {
+ return nil, err
+ }
+
+ for k, v := range val {
+ a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v)
+
+ if err != nil {
+ return nil, err
+ }
+
+ attributes[k] = a
+ }
+
+ return NewPaginationValueMust(PaginationValue{}.AttributeTypes(ctx), attributes), nil
+}
+
+func (t PaginationType) ValueType(ctx context.Context) attr.Value {
+ return PaginationValue{}
+}
+
+var _ basetypes.ObjectValuable = PaginationValue{}
+
+type PaginationValue struct {
+ Page basetypes.Int64Value `tfsdk:"page"`
+ Size basetypes.Int64Value `tfsdk:"size"`
+ Sort basetypes.StringValue `tfsdk:"sort"`
+ TotalPages basetypes.Int64Value `tfsdk:"total_pages"`
+ TotalRows basetypes.Int64Value `tfsdk:"total_rows"`
+ state attr.ValueState
+}
+
+func (v PaginationValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
+ attrTypes := make(map[string]tftypes.Type, 5)
+
+ var val tftypes.Value
+ var err error
+
+ attrTypes["page"] = basetypes.Int64Type{}.TerraformType(ctx)
+ attrTypes["size"] = basetypes.Int64Type{}.TerraformType(ctx)
+ attrTypes["sort"] = basetypes.StringType{}.TerraformType(ctx)
+ attrTypes["total_pages"] = basetypes.Int64Type{}.TerraformType(ctx)
+ attrTypes["total_rows"] = basetypes.Int64Type{}.TerraformType(ctx)
+
+ objectType := tftypes.Object{AttributeTypes: attrTypes}
+
+ switch v.state {
+ case attr.ValueStateKnown:
+ vals := make(map[string]tftypes.Value, 5)
+
+ val, err = v.Page.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["page"] = val
+
+ val, err = v.Size.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["size"] = val
+
+ val, err = v.Sort.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["sort"] = val
+
+ val, err = v.TotalPages.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["total_pages"] = val
+
+ val, err = v.TotalRows.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["total_rows"] = val
+
+ if err := tftypes.ValidateValue(objectType, vals); err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ return tftypes.NewValue(objectType, vals), nil
+ case attr.ValueStateNull:
+ return tftypes.NewValue(objectType, nil), nil
+ case attr.ValueStateUnknown:
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), nil
+ default:
+ panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state))
+ }
+}
+
+func (v PaginationValue) IsNull() bool {
+ return v.state == attr.ValueStateNull
+}
+
+func (v PaginationValue) IsUnknown() bool {
+ return v.state == attr.ValueStateUnknown
+}
+
+func (v PaginationValue) String() string {
+ return "PaginationValue"
+}
+
+func (v PaginationValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ attributeTypes := map[string]attr.Type{
+ "page": basetypes.Int64Type{},
+ "size": basetypes.Int64Type{},
+ "sort": basetypes.StringType{},
+ "total_pages": basetypes.Int64Type{},
+ "total_rows": basetypes.Int64Type{},
+ }
+
+ if v.IsNull() {
+ return types.ObjectNull(attributeTypes), diags
+ }
+
+ if v.IsUnknown() {
+ return types.ObjectUnknown(attributeTypes), diags
+ }
+
+ objVal, diags := types.ObjectValue(
+ attributeTypes,
+ map[string]attr.Value{
+ "page": v.Page,
+ "size": v.Size,
+ "sort": v.Sort,
+ "total_pages": v.TotalPages,
+ "total_rows": v.TotalRows,
+ })
+
+ return objVal, diags
+}
+
+func (v PaginationValue) Equal(o attr.Value) bool {
+ other, ok := o.(PaginationValue)
+
+ if !ok {
+ return false
+ }
+
+ if v.state != other.state {
+ return false
+ }
+
+ if v.state != attr.ValueStateKnown {
+ return true
+ }
+
+ if !v.Page.Equal(other.Page) {
+ return false
+ }
+
+ if !v.Size.Equal(other.Size) {
+ return false
+ }
+
+ if !v.Sort.Equal(other.Sort) {
+ return false
+ }
+
+ if !v.TotalPages.Equal(other.TotalPages) {
+ return false
+ }
+
+ if !v.TotalRows.Equal(other.TotalRows) {
+ return false
+ }
+
+ return true
+}
+
+func (v PaginationValue) Type(ctx context.Context) attr.Type {
+ return PaginationType{
+ basetypes.ObjectType{
+ AttrTypes: v.AttributeTypes(ctx),
+ },
+ }
+}
+
+func (v PaginationValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
+ return map[string]attr.Type{
+ "page": basetypes.Int64Type{},
+ "size": basetypes.Int64Type{},
+ "sort": basetypes.StringType{},
+ "total_pages": basetypes.Int64Type{},
+ "total_rows": basetypes.Int64Type{},
+ }
+}
diff --git a/stackit/internal/services/sqlserverflexbeta/flavor/functions.go b/stackit/internal/services/sqlserverflexbeta/flavor/functions.go
new file mode 100644
index 00000000..8c06da73
--- /dev/null
+++ b/stackit/internal/services/sqlserverflexbeta/flavor/functions.go
@@ -0,0 +1,65 @@
+package sqlserverFlexBetaFlavor
+
+import (
+ "context"
+ "fmt"
+
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta"
+)
+
+type flavorsClientReader interface {
+ GetFlavorsRequest(
+ ctx context.Context,
+ projectId, region string,
+ ) sqlserverflexbeta.ApiGetFlavorsRequestRequest
+}
+
+func getAllFlavors(ctx context.Context, client flavorsClientReader, projectId, region string) (
+ []sqlserverflexbeta.ListFlavors,
+ error,
+) {
+ getAllFilter := func(_ sqlserverflexbeta.ListFlavors) bool { return true }
+ flavorList, err := getFlavorsByFilter(ctx, client, projectId, region, getAllFilter)
+ if err != nil {
+ return nil, err
+ }
+ return flavorList, nil
+}
+
+// getFlavorsByFilter is a helper function to retrieve flavors using a filtern function.
+// Hint: The API does not have a GetFlavors endpoint, only ListFlavors
+func getFlavorsByFilter(
+ ctx context.Context,
+ client flavorsClientReader,
+ projectId, region string,
+ filter func(db sqlserverflexbeta.ListFlavors) bool,
+) ([]sqlserverflexbeta.ListFlavors, error) {
+ if projectId == "" || region == "" {
+ return nil, fmt.Errorf("listing sqlserverflexbeta flavors: projectId and region are required")
+ }
+
+ const pageSize = 25
+
+ var result = make([]sqlserverflexbeta.ListFlavors, 0)
+
+ for page := int64(1); ; page++ {
+ res, err := client.GetFlavorsRequest(ctx, projectId, region).
+ Page(page).Size(pageSize).Sort(sqlserverflexbeta.FLAVORSORT_INDEX_ASC).Execute()
+ if err != nil {
+ return nil, fmt.Errorf("requesting flavors list (page %d): %w", page, err)
+ }
+
+ // If the API returns no flavors, we have reached the end of the list.
+ if res.Flavors == nil || len(*res.Flavors) == 0 {
+ break
+ }
+
+ for _, flavor := range *res.Flavors {
+ if filter(flavor) {
+ result = append(result, flavor)
+ }
+ }
+ }
+
+ return result, nil
+}
diff --git a/stackit/internal/services/sqlserverflexbeta/flavor/functions_test.go b/stackit/internal/services/sqlserverflexbeta/flavor/functions_test.go
new file mode 100644
index 00000000..974f1fa4
--- /dev/null
+++ b/stackit/internal/services/sqlserverflexbeta/flavor/functions_test.go
@@ -0,0 +1,134 @@
+package sqlserverFlexBetaFlavor
+
+import (
+ "context"
+ "testing"
+
+ "github.com/stackitcloud/stackit-sdk-go/core/utils"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta"
+)
+
+type mockRequest struct {
+ executeFunc func() (*sqlserverflexbeta.GetFlavorsResponse, error)
+}
+
+func (m *mockRequest) Page(_ int64) sqlserverflexbeta.ApiGetFlavorsRequestRequest { return m }
+func (m *mockRequest) Size(_ int64) sqlserverflexbeta.ApiGetFlavorsRequestRequest { return m }
+func (m *mockRequest) Sort(_ sqlserverflexbeta.FlavorSort) sqlserverflexbeta.ApiGetFlavorsRequestRequest {
+ return m
+}
+func (m *mockRequest) Execute() (*sqlserverflexbeta.GetFlavorsResponse, error) {
+ return m.executeFunc()
+}
+
+type mockFlavorsClient struct {
+ executeRequest func() sqlserverflexbeta.ApiGetFlavorsRequestRequest
+}
+
+func (m *mockFlavorsClient) GetFlavorsRequest(_ context.Context, _, _ string) sqlserverflexbeta.ApiGetFlavorsRequestRequest {
+ return m.executeRequest()
+}
+
+var mockResp = func(page int64) (*sqlserverflexbeta.GetFlavorsResponse, error) {
+ if page == 1 {
+ return &sqlserverflexbeta.GetFlavorsResponse{
+ Flavors: &[]sqlserverflexbeta.ListFlavors{
+ {Id: utils.Ptr("flavor-1"), Description: utils.Ptr("first")},
+ {Id: utils.Ptr("flavor-2"), Description: utils.Ptr("second")},
+ },
+ }, nil
+ }
+ if page == 2 {
+ return &sqlserverflexbeta.GetFlavorsResponse{
+ Flavors: &[]sqlserverflexbeta.ListFlavors{
+ {Id: utils.Ptr("flavor-3"), Description: utils.Ptr("three")},
+ },
+ }, nil
+ }
+
+ return &sqlserverflexbeta.GetFlavorsResponse{
+ Flavors: &[]sqlserverflexbeta.ListFlavors{},
+ }, nil
+}
+
+func TestGetFlavorsByFilter(t *testing.T) {
+ tests := []struct {
+ description string
+ projectId string
+ region string
+ mockErr error
+ filter func(sqlserverflexbeta.ListFlavors) bool
+ wantCount int
+ wantErr bool
+ }{
+ {
+ description: "Success - Get all flavors (2 pages)",
+ projectId: "pid", region: "reg",
+ filter: func(_ sqlserverflexbeta.ListFlavors) bool { return true },
+ wantCount: 3,
+ wantErr: false,
+ },
+ {
+ description: "Success - Filter flavors by description",
+ projectId: "pid", region: "reg",
+ filter: func(f sqlserverflexbeta.ListFlavors) bool { return *f.Description == "first" },
+ wantCount: 1,
+ wantErr: false,
+ },
+ {
+ description: "Error - Missing parameters",
+ projectId: "", region: "reg",
+ wantErr: true,
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(
+ tt.description, func(t *testing.T) {
+ var currentPage int64
+ client := &mockFlavorsClient{
+ executeRequest: func() sqlserverflexbeta.ApiGetFlavorsRequestRequest {
+ return &mockRequest{
+ executeFunc: func() (*sqlserverflexbeta.GetFlavorsResponse, error) {
+ currentPage++
+ return mockResp(currentPage)
+ },
+ }
+ },
+ }
+ actual, err := getFlavorsByFilter(context.Background(), client, tt.projectId, tt.region, tt.filter)
+
+ if (err != nil) != tt.wantErr {
+ t.Errorf("getFlavorsByFilter() error = %v, wantErr %v", err, tt.wantErr)
+ return
+ }
+
+ if !tt.wantErr && len(actual) != tt.wantCount {
+ t.Errorf("getFlavorsByFilter() got %d flavors, want %d", len(actual), tt.wantCount)
+ }
+ },
+ )
+ }
+}
+
+func TestGetAllFlavors(t *testing.T) {
+ var currentPage int64
+ client := &mockFlavorsClient{
+ executeRequest: func() sqlserverflexbeta.ApiGetFlavorsRequestRequest {
+ return &mockRequest{
+ executeFunc: func() (*sqlserverflexbeta.GetFlavorsResponse, error) {
+ currentPage++
+ return mockResp(currentPage)
+ },
+ }
+ },
+ }
+
+ res, err := getAllFlavors(context.Background(), client, "pid", "reg")
+ if err != nil {
+ t.Errorf("getAllFlavors() unexpected error: %v", err)
+ }
+ if len(res) != 3 {
+ t.Errorf("getAllFlavors() expected 3 flavor, got %d", len(res))
+ }
+}
diff --git a/stackit/internal/services/sqlserverflexbeta/flavors/datasource.go b/stackit/internal/services/sqlserverflexbeta/flavors/datasource.go
new file mode 100644
index 00000000..41b1aad8
--- /dev/null
+++ b/stackit/internal/services/sqlserverflexbeta/flavors/datasource.go
@@ -0,0 +1,118 @@
+package sqlserverflexbeta
+
+import (
+ "context"
+ "fmt"
+ "net/http"
+
+ "github.com/hashicorp/terraform-plugin-framework/datasource"
+ "github.com/hashicorp/terraform-plugin-log/tflog"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
+
+ sqlserverflexbetaPkg "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta"
+
+ sqlserverflexbetaUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/utils"
+
+ sqlserverflexbetaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/flavors/datasources_gen"
+)
+
+var _ datasource.DataSource = (*flavorsDataSource)(nil)
+
+const errorPrefix = "[Sqlserverflexbeta - Flavors]"
+
+func NewFlavorsDataSource() datasource.DataSource {
+ return &flavorsDataSource{}
+}
+
+type flavorsDataSource struct {
+ client *sqlserverflexbetaPkg.APIClient
+ providerData core.ProviderData
+}
+
+func (d *flavorsDataSource) Metadata(_ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
+ resp.TypeName = req.ProviderTypeName + "_sqlserverflexbeta_flavors"
+}
+
+func (d *flavorsDataSource) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) {
+ resp.Schema = sqlserverflexbetaGen.FlavorsDataSourceSchema(ctx)
+}
+
+// Configure adds the provider configured client to the data source.
+func (d *flavorsDataSource) Configure(
+ ctx context.Context,
+ req datasource.ConfigureRequest,
+ resp *datasource.ConfigureResponse,
+) {
+ var ok bool
+ d.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics)
+ if !ok {
+ return
+ }
+
+ apiClient := sqlserverflexbetaUtils.ConfigureClient(ctx, &d.providerData, &resp.Diagnostics)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+ d.client = apiClient
+ tflog.Info(ctx, fmt.Sprintf("%s client configured", errorPrefix))
+}
+
+func (d *flavorsDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
+ var data sqlserverflexbetaGen.FlavorsModel
+
+ // Read Terraform configuration data into the model
+ resp.Diagnostics.Append(req.Config.Get(ctx, &data)...)
+
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ ctx = core.InitProviderContext(ctx)
+
+ projectId := data.ProjectId.ValueString()
+ region := d.providerData.GetRegionWithOverride(data.Region)
+ flavorsId := data.FlavorsId.ValueString()
+
+ ctx = tflog.SetField(ctx, "project_id", projectId)
+ ctx = tflog.SetField(ctx, "region", region)
+ ctx = tflog.SetField(ctx, "flavors_id", flavorsId)
+
+ flavorsResp, err := d.client.GetFlavorsRequest(ctx, projectId, region, flavorsId).Execute()
+ if err != nil {
+ utils.LogError(
+ ctx,
+ &resp.Diagnostics,
+ err,
+ "Reading flavors",
+ fmt.Sprintf("flavors with ID %q does not exist in project %q.", flavorsId, projectId),
+ map[int]string{
+ http.StatusForbidden: fmt.Sprintf("Project with ID %q not found or forbidden access", projectId),
+ },
+ )
+ resp.State.RemoveResource(ctx)
+ return
+ }
+
+ ctx = core.LogResponse(ctx)
+
+ // Todo: Read API call logic
+
+ // Example data value setting
+ // data.Id = types.StringValue("example-id")
+
+ err = mapResponseToModel(ctx, flavorsResp, &data, resp.Diagnostics)
+ if err != nil {
+ core.LogAndAddError(
+ ctx,
+ &resp.Diagnostics,
+ fmt.Sprintf("%s Read", errorPrefix),
+ fmt.Sprintf("Processing API payload: %v", err),
+ )
+ return
+ }
+
+ // Save data into Terraform state
+ resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
+}
diff --git a/stackit/internal/services/sqlserverflexbeta/flavors/datasources_gen/flavors_data_source_gen.go b/stackit/internal/services/sqlserverflexbeta/flavors/datasources_gen/flavors_data_source_gen.go
new file mode 100644
index 00000000..94b526be
--- /dev/null
+++ b/stackit/internal/services/sqlserverflexbeta/flavors/datasources_gen/flavors_data_source_gen.go
@@ -0,0 +1,1909 @@
+// Code generated by terraform-plugin-framework-generator DO NOT EDIT.
+
+package sqlserverflexbeta
+
+import (
+ "context"
+ "fmt"
+ "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ "github.com/hashicorp/terraform-plugin-framework/attr"
+ "github.com/hashicorp/terraform-plugin-framework/diag"
+ "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+ "github.com/hashicorp/terraform-plugin-framework/types/basetypes"
+ "github.com/hashicorp/terraform-plugin-go/tftypes"
+ "strings"
+
+ "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
+)
+
+func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
+ return schema.Schema{
+ Attributes: map[string]schema.Attribute{
+ "flavors": schema.ListNestedAttribute{
+ NestedObject: schema.NestedAttributeObject{
+ Attributes: map[string]schema.Attribute{
+ "cpu": schema.Int64Attribute{
+ Computed: true,
+ Description: "The cpu count of the instance.",
+ MarkdownDescription: "The cpu count of the instance.",
+ },
+ "description": schema.StringAttribute{
+ Computed: true,
+ Description: "The flavor description.",
+ MarkdownDescription: "The flavor description.",
+ },
+ "id": schema.StringAttribute{
+ Computed: true,
+ Description: "The id of the instance flavor.",
+ MarkdownDescription: "The id of the instance flavor.",
+ },
+ "max_gb": schema.Int64Attribute{
+ Computed: true,
+ Description: "maximum storage which can be ordered for the flavor in Gigabyte.",
+ MarkdownDescription: "maximum storage which can be ordered for the flavor in Gigabyte.",
+ },
+ "memory": schema.Int64Attribute{
+ Computed: true,
+ Description: "The memory of the instance in Gibibyte.",
+ MarkdownDescription: "The memory of the instance in Gibibyte.",
+ },
+ "min_gb": schema.Int64Attribute{
+ Computed: true,
+ Description: "minimum storage which is required to order in Gigabyte.",
+ MarkdownDescription: "minimum storage which is required to order in Gigabyte.",
+ },
+ "node_type": schema.StringAttribute{
+ Computed: true,
+ Description: "defines the nodeType it can be either single or HA",
+ MarkdownDescription: "defines the nodeType it can be either single or HA",
+ },
+ "storage_classes": schema.ListNestedAttribute{
+ NestedObject: schema.NestedAttributeObject{
+ Attributes: map[string]schema.Attribute{
+ "class": schema.StringAttribute{
+ Computed: true,
+ },
+ "max_io_per_sec": schema.Int64Attribute{
+ Computed: true,
+ },
+ "max_through_in_mb": schema.Int64Attribute{
+ Computed: true,
+ },
+ },
+ CustomType: StorageClassesType{
+ ObjectType: types.ObjectType{
+ AttrTypes: StorageClassesValue{}.AttributeTypes(ctx),
+ },
+ },
+ },
+ Computed: true,
+ Description: "maximum storage which can be ordered for the flavor in Gigabyte.",
+ MarkdownDescription: "maximum storage which can be ordered for the flavor in Gigabyte.",
+ },
+ },
+ CustomType: FlavorsType{
+ ObjectType: types.ObjectType{
+ AttrTypes: FlavorsValue{}.AttributeTypes(ctx),
+ },
+ },
+ },
+ Computed: true,
+ Description: "List of flavors available for the project.",
+ MarkdownDescription: "List of flavors available for the project.",
+ },
+ "page": schema.Int64Attribute{
+ Optional: true,
+ Computed: true,
+ Description: "Number of the page of items list to be returned.",
+ MarkdownDescription: "Number of the page of items list to be returned.",
+ },
+ "pagination": schema.SingleNestedAttribute{
+ Attributes: map[string]schema.Attribute{
+ "page": schema.Int64Attribute{
+ Computed: true,
+ },
+ "size": schema.Int64Attribute{
+ Computed: true,
+ },
+ "sort": schema.StringAttribute{
+ Computed: true,
+ },
+ "total_pages": schema.Int64Attribute{
+ Computed: true,
+ },
+ "total_rows": schema.Int64Attribute{
+ Computed: true,
+ },
+ },
+ CustomType: PaginationType{
+ ObjectType: types.ObjectType{
+ AttrTypes: PaginationValue{}.AttributeTypes(ctx),
+ },
+ },
+ Computed: true,
+ },
+ "project_id": schema.StringAttribute{
+ Required: true,
+ Description: "The STACKIT project ID.",
+ MarkdownDescription: "The STACKIT project ID.",
+ },
+ "region": schema.StringAttribute{
+ Required: true,
+ Description: "The region which should be addressed",
+ MarkdownDescription: "The region which should be addressed",
+ Validators: []validator.String{
+ stringvalidator.OneOf(
+ "eu01",
+ ),
+ },
+ },
+ "size": schema.Int64Attribute{
+ Optional: true,
+ Computed: true,
+ Description: "Number of items to be returned on each page.",
+ MarkdownDescription: "Number of items to be returned on each page.",
+ },
+ "sort": schema.StringAttribute{
+ Optional: true,
+ Computed: true,
+ Description: "Sorting of the flavors to be returned on each page.",
+ MarkdownDescription: "Sorting of the flavors to be returned on each page.",
+ Validators: []validator.String{
+ stringvalidator.OneOf(
+ "index.desc",
+ "index.asc",
+ "cpu.desc",
+ "cpu.asc",
+ "flavor_description.asc",
+ "flavor_description.desc",
+ "id.desc",
+ "id.asc",
+ "size_max.desc",
+ "size_max.asc",
+ "ram.desc",
+ "ram.asc",
+ "size_min.desc",
+ "size_min.asc",
+ "storage_class.asc",
+ "storage_class.desc",
+ "node_type.asc",
+ "node_type.desc",
+ ),
+ },
+ },
+ },
+ }
+}
+
+type FlavorsModel struct {
+ Flavors types.List `tfsdk:"flavors"`
+ Page types.Int64 `tfsdk:"page"`
+ Pagination PaginationValue `tfsdk:"pagination"`
+ ProjectId types.String `tfsdk:"project_id"`
+ Region types.String `tfsdk:"region"`
+ Size types.Int64 `tfsdk:"size"`
+ Sort types.String `tfsdk:"sort"`
+}
+
+var _ basetypes.ObjectTypable = FlavorsType{}
+
+type FlavorsType struct {
+ basetypes.ObjectType
+}
+
+func (t FlavorsType) Equal(o attr.Type) bool {
+ other, ok := o.(FlavorsType)
+
+ if !ok {
+ return false
+ }
+
+ return t.ObjectType.Equal(other.ObjectType)
+}
+
+func (t FlavorsType) String() string {
+ return "FlavorsType"
+}
+
+func (t FlavorsType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ attributes := in.Attributes()
+
+ cpuAttribute, ok := attributes["cpu"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `cpu is missing from object`)
+
+ return nil, diags
+ }
+
+ cpuVal, ok := cpuAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`cpu expected to be basetypes.Int64Value, was: %T`, cpuAttribute))
+ }
+
+ descriptionAttribute, ok := attributes["description"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `description is missing from object`)
+
+ return nil, diags
+ }
+
+ descriptionVal, ok := descriptionAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`description expected to be basetypes.StringValue, was: %T`, descriptionAttribute))
+ }
+
+ idAttribute, ok := attributes["id"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `id is missing from object`)
+
+ return nil, diags
+ }
+
+ idVal, ok := idAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`id expected to be basetypes.StringValue, was: %T`, idAttribute))
+ }
+
+ maxGbAttribute, ok := attributes["max_gb"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `max_gb is missing from object`)
+
+ return nil, diags
+ }
+
+ maxGbVal, ok := maxGbAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`max_gb expected to be basetypes.Int64Value, was: %T`, maxGbAttribute))
+ }
+
+ memoryAttribute, ok := attributes["memory"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `memory is missing from object`)
+
+ return nil, diags
+ }
+
+ memoryVal, ok := memoryAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`memory expected to be basetypes.Int64Value, was: %T`, memoryAttribute))
+ }
+
+ minGbAttribute, ok := attributes["min_gb"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `min_gb is missing from object`)
+
+ return nil, diags
+ }
+
+ minGbVal, ok := minGbAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`min_gb expected to be basetypes.Int64Value, was: %T`, minGbAttribute))
+ }
+
+ nodeTypeAttribute, ok := attributes["node_type"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `node_type is missing from object`)
+
+ return nil, diags
+ }
+
+ nodeTypeVal, ok := nodeTypeAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`node_type expected to be basetypes.StringValue, was: %T`, nodeTypeAttribute))
+ }
+
+ storageClassesAttribute, ok := attributes["storage_classes"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `storage_classes is missing from object`)
+
+ return nil, diags
+ }
+
+ storageClassesVal, ok := storageClassesAttribute.(basetypes.ListValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`storage_classes expected to be basetypes.ListValue, was: %T`, storageClassesAttribute))
+ }
+
+ if diags.HasError() {
+ return nil, diags
+ }
+
+ return FlavorsValue{
+ Cpu: cpuVal,
+ Description: descriptionVal,
+ Id: idVal,
+ MaxGb: maxGbVal,
+ Memory: memoryVal,
+ MinGb: minGbVal,
+ NodeType: nodeTypeVal,
+ StorageClasses: storageClassesVal,
+ state: attr.ValueStateKnown,
+ }, diags
+}
+
+func NewFlavorsValueNull() FlavorsValue {
+ return FlavorsValue{
+ state: attr.ValueStateNull,
+ }
+}
+
+func NewFlavorsValueUnknown() FlavorsValue {
+ return FlavorsValue{
+ state: attr.ValueStateUnknown,
+ }
+}
+
+func NewFlavorsValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (FlavorsValue, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521
+ ctx := context.Background()
+
+ for name, attributeType := range attributeTypes {
+ attribute, ok := attributes[name]
+
+ if !ok {
+ diags.AddError(
+ "Missing FlavorsValue Attribute Value",
+ "While creating a FlavorsValue value, a missing attribute value was detected. "+
+ "A FlavorsValue must contain values for all attributes, even if null or unknown. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("FlavorsValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()),
+ )
+
+ continue
+ }
+
+ if !attributeType.Equal(attribute.Type(ctx)) {
+ diags.AddError(
+ "Invalid FlavorsValue Attribute Type",
+ "While creating a FlavorsValue value, an invalid attribute value was detected. "+
+ "A FlavorsValue must use a matching attribute type for the value. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("FlavorsValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+
+ fmt.Sprintf("FlavorsValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)),
+ )
+ }
+ }
+
+ for name := range attributes {
+ _, ok := attributeTypes[name]
+
+ if !ok {
+ diags.AddError(
+ "Extra FlavorsValue Attribute Value",
+ "While creating a FlavorsValue value, an extra attribute value was detected. "+
+ "A FlavorsValue must not contain values beyond the expected attribute types. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("Extra FlavorsValue Attribute Name: %s", name),
+ )
+ }
+ }
+
+ if diags.HasError() {
+ return NewFlavorsValueUnknown(), diags
+ }
+
+ cpuAttribute, ok := attributes["cpu"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `cpu is missing from object`)
+
+ return NewFlavorsValueUnknown(), diags
+ }
+
+ cpuVal, ok := cpuAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`cpu expected to be basetypes.Int64Value, was: %T`, cpuAttribute))
+ }
+
+ descriptionAttribute, ok := attributes["description"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `description is missing from object`)
+
+ return NewFlavorsValueUnknown(), diags
+ }
+
+ descriptionVal, ok := descriptionAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`description expected to be basetypes.StringValue, was: %T`, descriptionAttribute))
+ }
+
+ idAttribute, ok := attributes["id"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `id is missing from object`)
+
+ return NewFlavorsValueUnknown(), diags
+ }
+
+ idVal, ok := idAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`id expected to be basetypes.StringValue, was: %T`, idAttribute))
+ }
+
+ maxGbAttribute, ok := attributes["max_gb"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `max_gb is missing from object`)
+
+ return NewFlavorsValueUnknown(), diags
+ }
+
+ maxGbVal, ok := maxGbAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`max_gb expected to be basetypes.Int64Value, was: %T`, maxGbAttribute))
+ }
+
+ memoryAttribute, ok := attributes["memory"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `memory is missing from object`)
+
+ return NewFlavorsValueUnknown(), diags
+ }
+
+ memoryVal, ok := memoryAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`memory expected to be basetypes.Int64Value, was: %T`, memoryAttribute))
+ }
+
+ minGbAttribute, ok := attributes["min_gb"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `min_gb is missing from object`)
+
+ return NewFlavorsValueUnknown(), diags
+ }
+
+ minGbVal, ok := minGbAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`min_gb expected to be basetypes.Int64Value, was: %T`, minGbAttribute))
+ }
+
+ nodeTypeAttribute, ok := attributes["node_type"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `node_type is missing from object`)
+
+ return NewFlavorsValueUnknown(), diags
+ }
+
+ nodeTypeVal, ok := nodeTypeAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`node_type expected to be basetypes.StringValue, was: %T`, nodeTypeAttribute))
+ }
+
+ storageClassesAttribute, ok := attributes["storage_classes"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `storage_classes is missing from object`)
+
+ return NewFlavorsValueUnknown(), diags
+ }
+
+ storageClassesVal, ok := storageClassesAttribute.(basetypes.ListValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`storage_classes expected to be basetypes.ListValue, was: %T`, storageClassesAttribute))
+ }
+
+ if diags.HasError() {
+ return NewFlavorsValueUnknown(), diags
+ }
+
+ return FlavorsValue{
+ Cpu: cpuVal,
+ Description: descriptionVal,
+ Id: idVal,
+ MaxGb: maxGbVal,
+ Memory: memoryVal,
+ MinGb: minGbVal,
+ NodeType: nodeTypeVal,
+ StorageClasses: storageClassesVal,
+ state: attr.ValueStateKnown,
+ }, diags
+}
+
+func NewFlavorsValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) FlavorsValue {
+ object, diags := NewFlavorsValue(attributeTypes, attributes)
+
+ if diags.HasError() {
+ // This could potentially be added to the diag package.
+ diagsStrings := make([]string, 0, len(diags))
+
+ for _, diagnostic := range diags {
+ diagsStrings = append(diagsStrings, fmt.Sprintf(
+ "%s | %s | %s",
+ diagnostic.Severity(),
+ diagnostic.Summary(),
+ diagnostic.Detail()))
+ }
+
+ panic("NewFlavorsValueMust received error(s): " + strings.Join(diagsStrings, "\n"))
+ }
+
+ return object
+}
+
+func (t FlavorsType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) {
+ if in.Type() == nil {
+ return NewFlavorsValueNull(), nil
+ }
+
+ if !in.Type().Equal(t.TerraformType(ctx)) {
+ return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type())
+ }
+
+ if !in.IsKnown() {
+ return NewFlavorsValueUnknown(), nil
+ }
+
+ if in.IsNull() {
+ return NewFlavorsValueNull(), nil
+ }
+
+ attributes := map[string]attr.Value{}
+
+ val := map[string]tftypes.Value{}
+
+ err := in.As(&val)
+
+ if err != nil {
+ return nil, err
+ }
+
+ for k, v := range val {
+ a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v)
+
+ if err != nil {
+ return nil, err
+ }
+
+ attributes[k] = a
+ }
+
+ return NewFlavorsValueMust(FlavorsValue{}.AttributeTypes(ctx), attributes), nil
+}
+
+func (t FlavorsType) ValueType(ctx context.Context) attr.Value {
+ return FlavorsValue{}
+}
+
+var _ basetypes.ObjectValuable = FlavorsValue{}
+
+type FlavorsValue struct {
+ Cpu basetypes.Int64Value `tfsdk:"cpu"`
+ Description basetypes.StringValue `tfsdk:"description"`
+ Id basetypes.StringValue `tfsdk:"id"`
+ MaxGb basetypes.Int64Value `tfsdk:"max_gb"`
+ Memory basetypes.Int64Value `tfsdk:"memory"`
+ MinGb basetypes.Int64Value `tfsdk:"min_gb"`
+ NodeType basetypes.StringValue `tfsdk:"node_type"`
+ StorageClasses basetypes.ListValue `tfsdk:"storage_classes"`
+ state attr.ValueState
+}
+
+func (v FlavorsValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
+ attrTypes := make(map[string]tftypes.Type, 8)
+
+ var val tftypes.Value
+ var err error
+
+ attrTypes["cpu"] = basetypes.Int64Type{}.TerraformType(ctx)
+ attrTypes["description"] = basetypes.StringType{}.TerraformType(ctx)
+ attrTypes["id"] = basetypes.StringType{}.TerraformType(ctx)
+ attrTypes["max_gb"] = basetypes.Int64Type{}.TerraformType(ctx)
+ attrTypes["memory"] = basetypes.Int64Type{}.TerraformType(ctx)
+ attrTypes["min_gb"] = basetypes.Int64Type{}.TerraformType(ctx)
+ attrTypes["node_type"] = basetypes.StringType{}.TerraformType(ctx)
+ attrTypes["storage_classes"] = basetypes.ListType{
+ ElemType: StorageClassesValue{}.Type(ctx),
+ }.TerraformType(ctx)
+
+ objectType := tftypes.Object{AttributeTypes: attrTypes}
+
+ switch v.state {
+ case attr.ValueStateKnown:
+ vals := make(map[string]tftypes.Value, 8)
+
+ val, err = v.Cpu.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["cpu"] = val
+
+ val, err = v.Description.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["description"] = val
+
+ val, err = v.Id.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["id"] = val
+
+ val, err = v.MaxGb.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["max_gb"] = val
+
+ val, err = v.Memory.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["memory"] = val
+
+ val, err = v.MinGb.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["min_gb"] = val
+
+ val, err = v.NodeType.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["node_type"] = val
+
+ val, err = v.StorageClasses.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["storage_classes"] = val
+
+ if err := tftypes.ValidateValue(objectType, vals); err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ return tftypes.NewValue(objectType, vals), nil
+ case attr.ValueStateNull:
+ return tftypes.NewValue(objectType, nil), nil
+ case attr.ValueStateUnknown:
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), nil
+ default:
+ panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state))
+ }
+}
+
+func (v FlavorsValue) IsNull() bool {
+ return v.state == attr.ValueStateNull
+}
+
+func (v FlavorsValue) IsUnknown() bool {
+ return v.state == attr.ValueStateUnknown
+}
+
+func (v FlavorsValue) String() string {
+ return "FlavorsValue"
+}
+
+func (v FlavorsValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ storageClasses := types.ListValueMust(
+ StorageClassesType{
+ basetypes.ObjectType{
+ AttrTypes: StorageClassesValue{}.AttributeTypes(ctx),
+ },
+ },
+ v.StorageClasses.Elements(),
+ )
+
+ if v.StorageClasses.IsNull() {
+ storageClasses = types.ListNull(
+ StorageClassesType{
+ basetypes.ObjectType{
+ AttrTypes: StorageClassesValue{}.AttributeTypes(ctx),
+ },
+ },
+ )
+ }
+
+ if v.StorageClasses.IsUnknown() {
+ storageClasses = types.ListUnknown(
+ StorageClassesType{
+ basetypes.ObjectType{
+ AttrTypes: StorageClassesValue{}.AttributeTypes(ctx),
+ },
+ },
+ )
+ }
+
+ attributeTypes := map[string]attr.Type{
+ "cpu": basetypes.Int64Type{},
+ "description": basetypes.StringType{},
+ "id": basetypes.StringType{},
+ "max_gb": basetypes.Int64Type{},
+ "memory": basetypes.Int64Type{},
+ "min_gb": basetypes.Int64Type{},
+ "node_type": basetypes.StringType{},
+ "storage_classes": basetypes.ListType{
+ ElemType: StorageClassesValue{}.Type(ctx),
+ },
+ }
+
+ if v.IsNull() {
+ return types.ObjectNull(attributeTypes), diags
+ }
+
+ if v.IsUnknown() {
+ return types.ObjectUnknown(attributeTypes), diags
+ }
+
+ objVal, diags := types.ObjectValue(
+ attributeTypes,
+ map[string]attr.Value{
+ "cpu": v.Cpu,
+ "description": v.Description,
+ "id": v.Id,
+ "max_gb": v.MaxGb,
+ "memory": v.Memory,
+ "min_gb": v.MinGb,
+ "node_type": v.NodeType,
+ "storage_classes": storageClasses,
+ })
+
+ return objVal, diags
+}
+
+func (v FlavorsValue) Equal(o attr.Value) bool {
+ other, ok := o.(FlavorsValue)
+
+ if !ok {
+ return false
+ }
+
+ if v.state != other.state {
+ return false
+ }
+
+ if v.state != attr.ValueStateKnown {
+ return true
+ }
+
+ if !v.Cpu.Equal(other.Cpu) {
+ return false
+ }
+
+ if !v.Description.Equal(other.Description) {
+ return false
+ }
+
+ if !v.Id.Equal(other.Id) {
+ return false
+ }
+
+ if !v.MaxGb.Equal(other.MaxGb) {
+ return false
+ }
+
+ if !v.Memory.Equal(other.Memory) {
+ return false
+ }
+
+ if !v.MinGb.Equal(other.MinGb) {
+ return false
+ }
+
+ if !v.NodeType.Equal(other.NodeType) {
+ return false
+ }
+
+ if !v.StorageClasses.Equal(other.StorageClasses) {
+ return false
+ }
+
+ return true
+}
+
+func (v FlavorsValue) Type(ctx context.Context) attr.Type {
+ return FlavorsType{
+ basetypes.ObjectType{
+ AttrTypes: v.AttributeTypes(ctx),
+ },
+ }
+}
+
+func (v FlavorsValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
+ return map[string]attr.Type{
+ "cpu": basetypes.Int64Type{},
+ "description": basetypes.StringType{},
+ "id": basetypes.StringType{},
+ "max_gb": basetypes.Int64Type{},
+ "memory": basetypes.Int64Type{},
+ "min_gb": basetypes.Int64Type{},
+ "node_type": basetypes.StringType{},
+ "storage_classes": basetypes.ListType{
+ ElemType: StorageClassesValue{}.Type(ctx),
+ },
+ }
+}
+
+var _ basetypes.ObjectTypable = StorageClassesType{}
+
+type StorageClassesType struct {
+ basetypes.ObjectType
+}
+
+func (t StorageClassesType) Equal(o attr.Type) bool {
+ other, ok := o.(StorageClassesType)
+
+ if !ok {
+ return false
+ }
+
+ return t.ObjectType.Equal(other.ObjectType)
+}
+
+func (t StorageClassesType) String() string {
+ return "StorageClassesType"
+}
+
+func (t StorageClassesType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ attributes := in.Attributes()
+
+ classAttribute, ok := attributes["class"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `class is missing from object`)
+
+ return nil, diags
+ }
+
+ classVal, ok := classAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`class expected to be basetypes.StringValue, was: %T`, classAttribute))
+ }
+
+ maxIoPerSecAttribute, ok := attributes["max_io_per_sec"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `max_io_per_sec is missing from object`)
+
+ return nil, diags
+ }
+
+ maxIoPerSecVal, ok := maxIoPerSecAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`max_io_per_sec expected to be basetypes.Int64Value, was: %T`, maxIoPerSecAttribute))
+ }
+
+ maxThroughInMbAttribute, ok := attributes["max_through_in_mb"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `max_through_in_mb is missing from object`)
+
+ return nil, diags
+ }
+
+ maxThroughInMbVal, ok := maxThroughInMbAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`max_through_in_mb expected to be basetypes.Int64Value, was: %T`, maxThroughInMbAttribute))
+ }
+
+ if diags.HasError() {
+ return nil, diags
+ }
+
+ return StorageClassesValue{
+ Class: classVal,
+ MaxIoPerSec: maxIoPerSecVal,
+ MaxThroughInMb: maxThroughInMbVal,
+ state: attr.ValueStateKnown,
+ }, diags
+}
+
+func NewStorageClassesValueNull() StorageClassesValue {
+ return StorageClassesValue{
+ state: attr.ValueStateNull,
+ }
+}
+
+func NewStorageClassesValueUnknown() StorageClassesValue {
+ return StorageClassesValue{
+ state: attr.ValueStateUnknown,
+ }
+}
+
+func NewStorageClassesValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (StorageClassesValue, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521
+ ctx := context.Background()
+
+ for name, attributeType := range attributeTypes {
+ attribute, ok := attributes[name]
+
+ if !ok {
+ diags.AddError(
+ "Missing StorageClassesValue Attribute Value",
+ "While creating a StorageClassesValue value, a missing attribute value was detected. "+
+ "A StorageClassesValue must contain values for all attributes, even if null or unknown. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("StorageClassesValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()),
+ )
+
+ continue
+ }
+
+ if !attributeType.Equal(attribute.Type(ctx)) {
+ diags.AddError(
+ "Invalid StorageClassesValue Attribute Type",
+ "While creating a StorageClassesValue value, an invalid attribute value was detected. "+
+ "A StorageClassesValue must use a matching attribute type for the value. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("StorageClassesValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+
+ fmt.Sprintf("StorageClassesValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)),
+ )
+ }
+ }
+
+ for name := range attributes {
+ _, ok := attributeTypes[name]
+
+ if !ok {
+ diags.AddError(
+ "Extra StorageClassesValue Attribute Value",
+ "While creating a StorageClassesValue value, an extra attribute value was detected. "+
+ "A StorageClassesValue must not contain values beyond the expected attribute types. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("Extra StorageClassesValue Attribute Name: %s", name),
+ )
+ }
+ }
+
+ if diags.HasError() {
+ return NewStorageClassesValueUnknown(), diags
+ }
+
+ classAttribute, ok := attributes["class"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `class is missing from object`)
+
+ return NewStorageClassesValueUnknown(), diags
+ }
+
+ classVal, ok := classAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`class expected to be basetypes.StringValue, was: %T`, classAttribute))
+ }
+
+ maxIoPerSecAttribute, ok := attributes["max_io_per_sec"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `max_io_per_sec is missing from object`)
+
+ return NewStorageClassesValueUnknown(), diags
+ }
+
+ maxIoPerSecVal, ok := maxIoPerSecAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`max_io_per_sec expected to be basetypes.Int64Value, was: %T`, maxIoPerSecAttribute))
+ }
+
+ maxThroughInMbAttribute, ok := attributes["max_through_in_mb"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `max_through_in_mb is missing from object`)
+
+ return NewStorageClassesValueUnknown(), diags
+ }
+
+ maxThroughInMbVal, ok := maxThroughInMbAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`max_through_in_mb expected to be basetypes.Int64Value, was: %T`, maxThroughInMbAttribute))
+ }
+
+ if diags.HasError() {
+ return NewStorageClassesValueUnknown(), diags
+ }
+
+ return StorageClassesValue{
+ Class: classVal,
+ MaxIoPerSec: maxIoPerSecVal,
+ MaxThroughInMb: maxThroughInMbVal,
+ state: attr.ValueStateKnown,
+ }, diags
+}
+
+func NewStorageClassesValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) StorageClassesValue {
+ object, diags := NewStorageClassesValue(attributeTypes, attributes)
+
+ if diags.HasError() {
+ // This could potentially be added to the diag package.
+ diagsStrings := make([]string, 0, len(diags))
+
+ for _, diagnostic := range diags {
+ diagsStrings = append(diagsStrings, fmt.Sprintf(
+ "%s | %s | %s",
+ diagnostic.Severity(),
+ diagnostic.Summary(),
+ diagnostic.Detail()))
+ }
+
+ panic("NewStorageClassesValueMust received error(s): " + strings.Join(diagsStrings, "\n"))
+ }
+
+ return object
+}
+
+func (t StorageClassesType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) {
+ if in.Type() == nil {
+ return NewStorageClassesValueNull(), nil
+ }
+
+ if !in.Type().Equal(t.TerraformType(ctx)) {
+ return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type())
+ }
+
+ if !in.IsKnown() {
+ return NewStorageClassesValueUnknown(), nil
+ }
+
+ if in.IsNull() {
+ return NewStorageClassesValueNull(), nil
+ }
+
+ attributes := map[string]attr.Value{}
+
+ val := map[string]tftypes.Value{}
+
+ err := in.As(&val)
+
+ if err != nil {
+ return nil, err
+ }
+
+ for k, v := range val {
+ a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v)
+
+ if err != nil {
+ return nil, err
+ }
+
+ attributes[k] = a
+ }
+
+ return NewStorageClassesValueMust(StorageClassesValue{}.AttributeTypes(ctx), attributes), nil
+}
+
+func (t StorageClassesType) ValueType(ctx context.Context) attr.Value {
+ return StorageClassesValue{}
+}
+
+var _ basetypes.ObjectValuable = StorageClassesValue{}
+
+type StorageClassesValue struct {
+ Class basetypes.StringValue `tfsdk:"class"`
+ MaxIoPerSec basetypes.Int64Value `tfsdk:"max_io_per_sec"`
+ MaxThroughInMb basetypes.Int64Value `tfsdk:"max_through_in_mb"`
+ state attr.ValueState
+}
+
+func (v StorageClassesValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
+ attrTypes := make(map[string]tftypes.Type, 3)
+
+ var val tftypes.Value
+ var err error
+
+ attrTypes["class"] = basetypes.StringType{}.TerraformType(ctx)
+ attrTypes["max_io_per_sec"] = basetypes.Int64Type{}.TerraformType(ctx)
+ attrTypes["max_through_in_mb"] = basetypes.Int64Type{}.TerraformType(ctx)
+
+ objectType := tftypes.Object{AttributeTypes: attrTypes}
+
+ switch v.state {
+ case attr.ValueStateKnown:
+ vals := make(map[string]tftypes.Value, 3)
+
+ val, err = v.Class.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["class"] = val
+
+ val, err = v.MaxIoPerSec.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["max_io_per_sec"] = val
+
+ val, err = v.MaxThroughInMb.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["max_through_in_mb"] = val
+
+ if err := tftypes.ValidateValue(objectType, vals); err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ return tftypes.NewValue(objectType, vals), nil
+ case attr.ValueStateNull:
+ return tftypes.NewValue(objectType, nil), nil
+ case attr.ValueStateUnknown:
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), nil
+ default:
+ panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state))
+ }
+}
+
+func (v StorageClassesValue) IsNull() bool {
+ return v.state == attr.ValueStateNull
+}
+
+func (v StorageClassesValue) IsUnknown() bool {
+ return v.state == attr.ValueStateUnknown
+}
+
+func (v StorageClassesValue) String() string {
+ return "StorageClassesValue"
+}
+
+func (v StorageClassesValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ attributeTypes := map[string]attr.Type{
+ "class": basetypes.StringType{},
+ "max_io_per_sec": basetypes.Int64Type{},
+ "max_through_in_mb": basetypes.Int64Type{},
+ }
+
+ if v.IsNull() {
+ return types.ObjectNull(attributeTypes), diags
+ }
+
+ if v.IsUnknown() {
+ return types.ObjectUnknown(attributeTypes), diags
+ }
+
+ objVal, diags := types.ObjectValue(
+ attributeTypes,
+ map[string]attr.Value{
+ "class": v.Class,
+ "max_io_per_sec": v.MaxIoPerSec,
+ "max_through_in_mb": v.MaxThroughInMb,
+ })
+
+ return objVal, diags
+}
+
+func (v StorageClassesValue) Equal(o attr.Value) bool {
+ other, ok := o.(StorageClassesValue)
+
+ if !ok {
+ return false
+ }
+
+ if v.state != other.state {
+ return false
+ }
+
+ if v.state != attr.ValueStateKnown {
+ return true
+ }
+
+ if !v.Class.Equal(other.Class) {
+ return false
+ }
+
+ if !v.MaxIoPerSec.Equal(other.MaxIoPerSec) {
+ return false
+ }
+
+ if !v.MaxThroughInMb.Equal(other.MaxThroughInMb) {
+ return false
+ }
+
+ return true
+}
+
+func (v StorageClassesValue) Type(ctx context.Context) attr.Type {
+ return StorageClassesType{
+ basetypes.ObjectType{
+ AttrTypes: v.AttributeTypes(ctx),
+ },
+ }
+}
+
+func (v StorageClassesValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
+ return map[string]attr.Type{
+ "class": basetypes.StringType{},
+ "max_io_per_sec": basetypes.Int64Type{},
+ "max_through_in_mb": basetypes.Int64Type{},
+ }
+}
+
+var _ basetypes.ObjectTypable = PaginationType{}
+
+type PaginationType struct {
+ basetypes.ObjectType
+}
+
+func (t PaginationType) Equal(o attr.Type) bool {
+ other, ok := o.(PaginationType)
+
+ if !ok {
+ return false
+ }
+
+ return t.ObjectType.Equal(other.ObjectType)
+}
+
+func (t PaginationType) String() string {
+ return "PaginationType"
+}
+
+func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ attributes := in.Attributes()
+
+ pageAttribute, ok := attributes["page"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `page is missing from object`)
+
+ return nil, diags
+ }
+
+ pageVal, ok := pageAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute))
+ }
+
+ sizeAttribute, ok := attributes["size"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `size is missing from object`)
+
+ return nil, diags
+ }
+
+ sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
+ }
+
+ sortAttribute, ok := attributes["sort"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `sort is missing from object`)
+
+ return nil, diags
+ }
+
+ sortVal, ok := sortAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`sort expected to be basetypes.StringValue, was: %T`, sortAttribute))
+ }
+
+ totalPagesAttribute, ok := attributes["total_pages"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `total_pages is missing from object`)
+
+ return nil, diags
+ }
+
+ totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute))
+ }
+
+ totalRowsAttribute, ok := attributes["total_rows"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `total_rows is missing from object`)
+
+ return nil, diags
+ }
+
+ totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute))
+ }
+
+ if diags.HasError() {
+ return nil, diags
+ }
+
+ return PaginationValue{
+ Page: pageVal,
+ Size: sizeVal,
+ Sort: sortVal,
+ TotalPages: totalPagesVal,
+ TotalRows: totalRowsVal,
+ state: attr.ValueStateKnown,
+ }, diags
+}
+
+func NewPaginationValueNull() PaginationValue {
+ return PaginationValue{
+ state: attr.ValueStateNull,
+ }
+}
+
+func NewPaginationValueUnknown() PaginationValue {
+ return PaginationValue{
+ state: attr.ValueStateUnknown,
+ }
+}
+
+func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (PaginationValue, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521
+ ctx := context.Background()
+
+ for name, attributeType := range attributeTypes {
+ attribute, ok := attributes[name]
+
+ if !ok {
+ diags.AddError(
+ "Missing PaginationValue Attribute Value",
+ "While creating a PaginationValue value, a missing attribute value was detected. "+
+ "A PaginationValue must contain values for all attributes, even if null or unknown. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("PaginationValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()),
+ )
+
+ continue
+ }
+
+ if !attributeType.Equal(attribute.Type(ctx)) {
+ diags.AddError(
+ "Invalid PaginationValue Attribute Type",
+ "While creating a PaginationValue value, an invalid attribute value was detected. "+
+ "A PaginationValue must use a matching attribute type for the value. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("PaginationValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+
+ fmt.Sprintf("PaginationValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)),
+ )
+ }
+ }
+
+ for name := range attributes {
+ _, ok := attributeTypes[name]
+
+ if !ok {
+ diags.AddError(
+ "Extra PaginationValue Attribute Value",
+ "While creating a PaginationValue value, an extra attribute value was detected. "+
+ "A PaginationValue must not contain values beyond the expected attribute types. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("Extra PaginationValue Attribute Name: %s", name),
+ )
+ }
+ }
+
+ if diags.HasError() {
+ return NewPaginationValueUnknown(), diags
+ }
+
+ pageAttribute, ok := attributes["page"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `page is missing from object`)
+
+ return NewPaginationValueUnknown(), diags
+ }
+
+ pageVal, ok := pageAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute))
+ }
+
+ sizeAttribute, ok := attributes["size"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `size is missing from object`)
+
+ return NewPaginationValueUnknown(), diags
+ }
+
+ sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
+ }
+
+ sortAttribute, ok := attributes["sort"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `sort is missing from object`)
+
+ return NewPaginationValueUnknown(), diags
+ }
+
+ sortVal, ok := sortAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`sort expected to be basetypes.StringValue, was: %T`, sortAttribute))
+ }
+
+ totalPagesAttribute, ok := attributes["total_pages"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `total_pages is missing from object`)
+
+ return NewPaginationValueUnknown(), diags
+ }
+
+ totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute))
+ }
+
+ totalRowsAttribute, ok := attributes["total_rows"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `total_rows is missing from object`)
+
+ return NewPaginationValueUnknown(), diags
+ }
+
+ totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute))
+ }
+
+ if diags.HasError() {
+ return NewPaginationValueUnknown(), diags
+ }
+
+ return PaginationValue{
+ Page: pageVal,
+ Size: sizeVal,
+ Sort: sortVal,
+ TotalPages: totalPagesVal,
+ TotalRows: totalRowsVal,
+ state: attr.ValueStateKnown,
+ }, diags
+}
+
+func NewPaginationValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) PaginationValue {
+ object, diags := NewPaginationValue(attributeTypes, attributes)
+
+ if diags.HasError() {
+ // This could potentially be added to the diag package.
+ diagsStrings := make([]string, 0, len(diags))
+
+ for _, diagnostic := range diags {
+ diagsStrings = append(diagsStrings, fmt.Sprintf(
+ "%s | %s | %s",
+ diagnostic.Severity(),
+ diagnostic.Summary(),
+ diagnostic.Detail()))
+ }
+
+ panic("NewPaginationValueMust received error(s): " + strings.Join(diagsStrings, "\n"))
+ }
+
+ return object
+}
+
+func (t PaginationType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) {
+ if in.Type() == nil {
+ return NewPaginationValueNull(), nil
+ }
+
+ if !in.Type().Equal(t.TerraformType(ctx)) {
+ return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type())
+ }
+
+ if !in.IsKnown() {
+ return NewPaginationValueUnknown(), nil
+ }
+
+ if in.IsNull() {
+ return NewPaginationValueNull(), nil
+ }
+
+ attributes := map[string]attr.Value{}
+
+ val := map[string]tftypes.Value{}
+
+ err := in.As(&val)
+
+ if err != nil {
+ return nil, err
+ }
+
+ for k, v := range val {
+ a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v)
+
+ if err != nil {
+ return nil, err
+ }
+
+ attributes[k] = a
+ }
+
+ return NewPaginationValueMust(PaginationValue{}.AttributeTypes(ctx), attributes), nil
+}
+
+func (t PaginationType) ValueType(ctx context.Context) attr.Value {
+ return PaginationValue{}
+}
+
+var _ basetypes.ObjectValuable = PaginationValue{}
+
+type PaginationValue struct {
+ Page basetypes.Int64Value `tfsdk:"page"`
+ Size basetypes.Int64Value `tfsdk:"size"`
+ Sort basetypes.StringValue `tfsdk:"sort"`
+ TotalPages basetypes.Int64Value `tfsdk:"total_pages"`
+ TotalRows basetypes.Int64Value `tfsdk:"total_rows"`
+ state attr.ValueState
+}
+
+func (v PaginationValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
+ attrTypes := make(map[string]tftypes.Type, 5)
+
+ var val tftypes.Value
+ var err error
+
+ attrTypes["page"] = basetypes.Int64Type{}.TerraformType(ctx)
+ attrTypes["size"] = basetypes.Int64Type{}.TerraformType(ctx)
+ attrTypes["sort"] = basetypes.StringType{}.TerraformType(ctx)
+ attrTypes["total_pages"] = basetypes.Int64Type{}.TerraformType(ctx)
+ attrTypes["total_rows"] = basetypes.Int64Type{}.TerraformType(ctx)
+
+ objectType := tftypes.Object{AttributeTypes: attrTypes}
+
+ switch v.state {
+ case attr.ValueStateKnown:
+ vals := make(map[string]tftypes.Value, 5)
+
+ val, err = v.Page.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["page"] = val
+
+ val, err = v.Size.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["size"] = val
+
+ val, err = v.Sort.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["sort"] = val
+
+ val, err = v.TotalPages.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["total_pages"] = val
+
+ val, err = v.TotalRows.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["total_rows"] = val
+
+ if err := tftypes.ValidateValue(objectType, vals); err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ return tftypes.NewValue(objectType, vals), nil
+ case attr.ValueStateNull:
+ return tftypes.NewValue(objectType, nil), nil
+ case attr.ValueStateUnknown:
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), nil
+ default:
+ panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state))
+ }
+}
+
+func (v PaginationValue) IsNull() bool {
+ return v.state == attr.ValueStateNull
+}
+
+func (v PaginationValue) IsUnknown() bool {
+ return v.state == attr.ValueStateUnknown
+}
+
+func (v PaginationValue) String() string {
+ return "PaginationValue"
+}
+
+func (v PaginationValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ attributeTypes := map[string]attr.Type{
+ "page": basetypes.Int64Type{},
+ "size": basetypes.Int64Type{},
+ "sort": basetypes.StringType{},
+ "total_pages": basetypes.Int64Type{},
+ "total_rows": basetypes.Int64Type{},
+ }
+
+ if v.IsNull() {
+ return types.ObjectNull(attributeTypes), diags
+ }
+
+ if v.IsUnknown() {
+ return types.ObjectUnknown(attributeTypes), diags
+ }
+
+ objVal, diags := types.ObjectValue(
+ attributeTypes,
+ map[string]attr.Value{
+ "page": v.Page,
+ "size": v.Size,
+ "sort": v.Sort,
+ "total_pages": v.TotalPages,
+ "total_rows": v.TotalRows,
+ })
+
+ return objVal, diags
+}
+
+func (v PaginationValue) Equal(o attr.Value) bool {
+ other, ok := o.(PaginationValue)
+
+ if !ok {
+ return false
+ }
+
+ if v.state != other.state {
+ return false
+ }
+
+ if v.state != attr.ValueStateKnown {
+ return true
+ }
+
+ if !v.Page.Equal(other.Page) {
+ return false
+ }
+
+ if !v.Size.Equal(other.Size) {
+ return false
+ }
+
+ if !v.Sort.Equal(other.Sort) {
+ return false
+ }
+
+ if !v.TotalPages.Equal(other.TotalPages) {
+ return false
+ }
+
+ if !v.TotalRows.Equal(other.TotalRows) {
+ return false
+ }
+
+ return true
+}
+
+func (v PaginationValue) Type(ctx context.Context) attr.Type {
+ return PaginationType{
+ basetypes.ObjectType{
+ AttrTypes: v.AttributeTypes(ctx),
+ },
+ }
+}
+
+func (v PaginationValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
+ return map[string]attr.Type{
+ "page": basetypes.Int64Type{},
+ "size": basetypes.Int64Type{},
+ "sort": basetypes.StringType{},
+ "total_pages": basetypes.Int64Type{},
+ "total_rows": basetypes.Int64Type{},
+ }
+}
diff --git a/stackit/internal/services/sqlserverflexbeta/instance/datasource.go b/stackit/internal/services/sqlserverflexbeta/instance/datasource.go
new file mode 100644
index 00000000..842a4cfd
--- /dev/null
+++ b/stackit/internal/services/sqlserverflexbeta/instance/datasource.go
@@ -0,0 +1,134 @@
+package sqlserverflexbeta
+
+import (
+ "context"
+ "fmt"
+ "net/http"
+
+ "github.com/hashicorp/terraform-plugin-framework/datasource"
+ "github.com/hashicorp/terraform-plugin-log/tflog"
+ "github.com/stackitcloud/stackit-sdk-go/core/config"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
+
+ sqlserverflexbetaPkg "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta"
+
+ sqlserverflexbetaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/instance/datasources_gen"
+)
+
+var _ datasource.DataSource = (*instanceDataSource)(nil)
+
+const errorPrefix = "[Sqlserverflexbeta - Instance]"
+
+func NewInstanceDataSource() datasource.DataSource {
+ return &instanceDataSource{}
+}
+
+type instanceDataSource struct {
+ client *sqlserverflexbetaPkg.APIClient
+ providerData core.ProviderData
+}
+
+func (d *instanceDataSource) Metadata(_ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
+ resp.TypeName = req.ProviderTypeName + "_sqlserverflexbeta_instance"
+}
+
+func (d *instanceDataSource) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) {
+ resp.Schema = sqlserverflexbetaGen.InstanceDataSourceSchema(ctx)
+}
+
+// Configure adds the provider configured client to the data source.
+func (d *instanceDataSource) Configure(
+ ctx context.Context,
+ req datasource.ConfigureRequest,
+ resp *datasource.ConfigureResponse,
+) {
+ var ok bool
+ d.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics)
+ if !ok {
+ return
+ }
+
+ apiClientConfigOptions := []config.ConfigurationOption{
+ config.WithCustomAuth(d.providerData.RoundTripper),
+ utils.UserAgentConfigOption(d.providerData.Version),
+ }
+ if d.providerData.SQLServerFlexCustomEndpoint != "" {
+ apiClientConfigOptions = append(
+ apiClientConfigOptions,
+ config.WithEndpoint(d.providerData.SQLServerFlexCustomEndpoint),
+ )
+ } else {
+ apiClientConfigOptions = append(
+ apiClientConfigOptions,
+ config.WithRegion(d.providerData.GetRegion()),
+ )
+ }
+ apiClient, err := sqlserverflexbetaPkg.NewAPIClient(apiClientConfigOptions...)
+ if err != nil {
+ resp.Diagnostics.AddError(
+ "Error configuring API client",
+ fmt.Sprintf(
+ "Configuring client: %v. This is an error related to the provider configuration, not to the resource configuration",
+ err,
+ ),
+ )
+ return
+ }
+ d.client = apiClient
+ tflog.Info(ctx, fmt.Sprintf("%s client configured", errorPrefix))
+}
+
+func (d *instanceDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
+ var data sqlserverflexbetaGen.InstanceModel
+
+ // Read Terraform configuration data into the model
+ resp.Diagnostics.Append(req.Config.Get(ctx, &data)...)
+
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ ctx = core.InitProviderContext(ctx)
+
+ projectId := data.ProjectId.ValueString()
+ region := d.providerData.GetRegionWithOverride(data.Region)
+ instanceId := data.InstanceId.ValueString()
+
+ ctx = tflog.SetField(ctx, "project_id", projectId)
+ ctx = tflog.SetField(ctx, "region", region)
+ ctx = tflog.SetField(ctx, "instance_id", instanceId)
+
+ instanceResp, err := d.client.GetInstanceRequest(ctx, projectId, region, instanceId).Execute()
+ if err != nil {
+ utils.LogError(
+ ctx,
+ &resp.Diagnostics,
+ err,
+ "Reading instance",
+ fmt.Sprintf("instance with ID %q does not exist in project %q.", instanceId, projectId),
+ map[int]string{
+ http.StatusForbidden: fmt.Sprintf("Project with ID %q not found or forbidden access", projectId),
+ },
+ )
+ resp.State.RemoveResource(ctx)
+ return
+ }
+
+ ctx = core.LogResponse(ctx)
+
+ err = mapDataResponseToModel(ctx, instanceResp, &data, resp.Diagnostics)
+ if err != nil {
+ core.LogAndAddError(
+ ctx,
+ &resp.Diagnostics,
+ fmt.Sprintf("%s Read", errorPrefix),
+ fmt.Sprintf("Processing API payload: %v", err),
+ )
+ return
+ }
+
+ // Save data into Terraform state
+ resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
+}
diff --git a/stackit/internal/services/sqlserverflexbeta/instance/datasources_gen/instance_data_source_gen.go b/stackit/internal/services/sqlserverflexbeta/instance/datasources_gen/instance_data_source_gen.go
new file mode 100644
index 00000000..87476c3c
--- /dev/null
+++ b/stackit/internal/services/sqlserverflexbeta/instance/datasources_gen/instance_data_source_gen.go
@@ -0,0 +1,1579 @@
+// Code generated by terraform-plugin-framework-generator DO NOT EDIT.
+
+package sqlserverflexbeta
+
+import (
+ "context"
+ "fmt"
+ "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ "github.com/hashicorp/terraform-plugin-framework/attr"
+ "github.com/hashicorp/terraform-plugin-framework/diag"
+ "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+ "github.com/hashicorp/terraform-plugin-framework/types/basetypes"
+ "github.com/hashicorp/terraform-plugin-go/tftypes"
+ "strings"
+
+ "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
+)
+
+func InstanceDataSourceSchema(ctx context.Context) schema.Schema {
+ return schema.Schema{
+ Attributes: map[string]schema.Attribute{
+ "backup_schedule": schema.StringAttribute{
+ Computed: true,
+ Description: "The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule.",
+ MarkdownDescription: "The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule.",
+ },
+ "edition": schema.StringAttribute{
+ Computed: true,
+ Description: "Edition of the MSSQL server instance",
+ MarkdownDescription: "Edition of the MSSQL server instance",
+ },
+ "encryption": schema.SingleNestedAttribute{
+ Attributes: map[string]schema.Attribute{
+ "kek_key_id": schema.StringAttribute{
+ Computed: true,
+ Description: "The key identifier",
+ MarkdownDescription: "The key identifier",
+ },
+ "kek_key_ring_id": schema.StringAttribute{
+ Computed: true,
+ Description: "The keyring identifier",
+ MarkdownDescription: "The keyring identifier",
+ },
+ "kek_key_version": schema.StringAttribute{
+ Computed: true,
+ Description: "The key version",
+ MarkdownDescription: "The key version",
+ },
+ "service_account": schema.StringAttribute{
+ Computed: true,
+ },
+ },
+ CustomType: EncryptionType{
+ ObjectType: types.ObjectType{
+ AttrTypes: EncryptionValue{}.AttributeTypes(ctx),
+ },
+ },
+ Computed: true,
+ Description: "this defines which key to use for storage encryption",
+ MarkdownDescription: "this defines which key to use for storage encryption",
+ },
+ "flavor_id": schema.StringAttribute{
+ Computed: true,
+ Description: "The id of the instance flavor.",
+ MarkdownDescription: "The id of the instance flavor.",
+ },
+ "id": schema.StringAttribute{
+ Computed: true,
+ Description: "The ID of the instance.",
+ MarkdownDescription: "The ID of the instance.",
+ },
+ "instance_id": schema.StringAttribute{
+ Required: true,
+ Description: "The ID of the instance.",
+ MarkdownDescription: "The ID of the instance.",
+ },
+ "is_deletable": schema.BoolAttribute{
+ Computed: true,
+ Description: "Whether the instance can be deleted or not.",
+ MarkdownDescription: "Whether the instance can be deleted or not.",
+ },
+ "name": schema.StringAttribute{
+ Computed: true,
+ Description: "The name of the instance.",
+ MarkdownDescription: "The name of the instance.",
+ },
+ "network": schema.SingleNestedAttribute{
+ Attributes: map[string]schema.Attribute{
+ "access_scope": schema.StringAttribute{
+ Computed: true,
+ Description: "The network access scope of the instance\n\n⚠️ **Note:** This feature is in private preview. Supplying this object is only permitted for enabled accounts. If your account does not have access, the request will be rejected.\n",
+ MarkdownDescription: "The network access scope of the instance\n\n⚠️ **Note:** This feature is in private preview. Supplying this object is only permitted for enabled accounts. If your account does not have access, the request will be rejected.\n",
+ },
+ "acl": schema.ListAttribute{
+ ElementType: types.StringType,
+ Computed: true,
+ Description: "List of IPV4 cidr.",
+ MarkdownDescription: "List of IPV4 cidr.",
+ },
+ "instance_address": schema.StringAttribute{
+ Computed: true,
+ },
+ "router_address": schema.StringAttribute{
+ Computed: true,
+ },
+ },
+ CustomType: NetworkType{
+ ObjectType: types.ObjectType{
+ AttrTypes: NetworkValue{}.AttributeTypes(ctx),
+ },
+ },
+ Computed: true,
+ Description: "The access configuration of the instance",
+ MarkdownDescription: "The access configuration of the instance",
+ },
+ "project_id": schema.StringAttribute{
+ Required: true,
+ Description: "The STACKIT project ID.",
+ MarkdownDescription: "The STACKIT project ID.",
+ },
+ "region": schema.StringAttribute{
+ Required: true,
+ Description: "The region which should be addressed",
+ MarkdownDescription: "The region which should be addressed",
+ Validators: []validator.String{
+ stringvalidator.OneOf(
+ "eu01",
+ ),
+ },
+ },
+ "replicas": schema.Int64Attribute{
+ Computed: true,
+ Description: "How many replicas the instance should have.",
+ MarkdownDescription: "How many replicas the instance should have.",
+ },
+ "retention_days": schema.Int64Attribute{
+ Computed: true,
+ Description: "The days for how long the backup files should be stored before cleaned up. 30 to 365",
+ MarkdownDescription: "The days for how long the backup files should be stored before cleaned up. 30 to 365",
+ },
+ "status": schema.StringAttribute{
+ Computed: true,
+ },
+ "storage": schema.SingleNestedAttribute{
+ Attributes: map[string]schema.Attribute{
+ "class": schema.StringAttribute{
+ Computed: true,
+ Description: "The storage class for the storage.",
+ MarkdownDescription: "The storage class for the storage.",
+ },
+ "size": schema.Int64Attribute{
+ Computed: true,
+ Description: "The storage size in Gigabytes.",
+ MarkdownDescription: "The storage size in Gigabytes.",
+ },
+ },
+ CustomType: StorageType{
+ ObjectType: types.ObjectType{
+ AttrTypes: StorageValue{}.AttributeTypes(ctx),
+ },
+ },
+ Computed: true,
+ Description: "The object containing information about the storage size and class.",
+ MarkdownDescription: "The object containing information about the storage size and class.",
+ },
+ "version": schema.StringAttribute{
+ Computed: true,
+ Description: "The sqlserver version used for the instance.",
+ MarkdownDescription: "The sqlserver version used for the instance.",
+ },
+ },
+ }
+}
+
+type InstanceModel struct {
+ BackupSchedule types.String `tfsdk:"backup_schedule"`
+ Edition types.String `tfsdk:"edition"`
+ Encryption EncryptionValue `tfsdk:"encryption"`
+ FlavorId types.String `tfsdk:"flavor_id"`
+ Id types.String `tfsdk:"id"`
+ InstanceId types.String `tfsdk:"instance_id"`
+ IsDeletable types.Bool `tfsdk:"is_deletable"`
+ Name types.String `tfsdk:"name"`
+ Network NetworkValue `tfsdk:"network"`
+ ProjectId types.String `tfsdk:"project_id"`
+ Region types.String `tfsdk:"region"`
+ Replicas types.Int64 `tfsdk:"replicas"`
+ RetentionDays types.Int64 `tfsdk:"retention_days"`
+ Status types.String `tfsdk:"status"`
+ Storage StorageValue `tfsdk:"storage"`
+ Version types.String `tfsdk:"version"`
+}
+
+var _ basetypes.ObjectTypable = EncryptionType{}
+
+type EncryptionType struct {
+ basetypes.ObjectType
+}
+
+func (t EncryptionType) Equal(o attr.Type) bool {
+ other, ok := o.(EncryptionType)
+
+ if !ok {
+ return false
+ }
+
+ return t.ObjectType.Equal(other.ObjectType)
+}
+
+func (t EncryptionType) String() string {
+ return "EncryptionType"
+}
+
+func (t EncryptionType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ attributes := in.Attributes()
+
+ kekKeyIdAttribute, ok := attributes["kek_key_id"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `kek_key_id is missing from object`)
+
+ return nil, diags
+ }
+
+ kekKeyIdVal, ok := kekKeyIdAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`kek_key_id expected to be basetypes.StringValue, was: %T`, kekKeyIdAttribute))
+ }
+
+ kekKeyRingIdAttribute, ok := attributes["kek_key_ring_id"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `kek_key_ring_id is missing from object`)
+
+ return nil, diags
+ }
+
+ kekKeyRingIdVal, ok := kekKeyRingIdAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`kek_key_ring_id expected to be basetypes.StringValue, was: %T`, kekKeyRingIdAttribute))
+ }
+
+ kekKeyVersionAttribute, ok := attributes["kek_key_version"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `kek_key_version is missing from object`)
+
+ return nil, diags
+ }
+
+ kekKeyVersionVal, ok := kekKeyVersionAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`kek_key_version expected to be basetypes.StringValue, was: %T`, kekKeyVersionAttribute))
+ }
+
+ serviceAccountAttribute, ok := attributes["service_account"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `service_account is missing from object`)
+
+ return nil, diags
+ }
+
+ serviceAccountVal, ok := serviceAccountAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`service_account expected to be basetypes.StringValue, was: %T`, serviceAccountAttribute))
+ }
+
+ if diags.HasError() {
+ return nil, diags
+ }
+
+ return EncryptionValue{
+ KekKeyId: kekKeyIdVal,
+ KekKeyRingId: kekKeyRingIdVal,
+ KekKeyVersion: kekKeyVersionVal,
+ ServiceAccount: serviceAccountVal,
+ state: attr.ValueStateKnown,
+ }, diags
+}
+
+func NewEncryptionValueNull() EncryptionValue {
+ return EncryptionValue{
+ state: attr.ValueStateNull,
+ }
+}
+
+func NewEncryptionValueUnknown() EncryptionValue {
+ return EncryptionValue{
+ state: attr.ValueStateUnknown,
+ }
+}
+
+func NewEncryptionValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (EncryptionValue, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521
+ ctx := context.Background()
+
+ for name, attributeType := range attributeTypes {
+ attribute, ok := attributes[name]
+
+ if !ok {
+ diags.AddError(
+ "Missing EncryptionValue Attribute Value",
+ "While creating a EncryptionValue value, a missing attribute value was detected. "+
+ "A EncryptionValue must contain values for all attributes, even if null or unknown. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("EncryptionValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()),
+ )
+
+ continue
+ }
+
+ if !attributeType.Equal(attribute.Type(ctx)) {
+ diags.AddError(
+ "Invalid EncryptionValue Attribute Type",
+ "While creating a EncryptionValue value, an invalid attribute value was detected. "+
+ "A EncryptionValue must use a matching attribute type for the value. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("EncryptionValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+
+ fmt.Sprintf("EncryptionValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)),
+ )
+ }
+ }
+
+ for name := range attributes {
+ _, ok := attributeTypes[name]
+
+ if !ok {
+ diags.AddError(
+ "Extra EncryptionValue Attribute Value",
+ "While creating a EncryptionValue value, an extra attribute value was detected. "+
+ "A EncryptionValue must not contain values beyond the expected attribute types. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("Extra EncryptionValue Attribute Name: %s", name),
+ )
+ }
+ }
+
+ if diags.HasError() {
+ return NewEncryptionValueUnknown(), diags
+ }
+
+ kekKeyIdAttribute, ok := attributes["kek_key_id"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `kek_key_id is missing from object`)
+
+ return NewEncryptionValueUnknown(), diags
+ }
+
+ kekKeyIdVal, ok := kekKeyIdAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`kek_key_id expected to be basetypes.StringValue, was: %T`, kekKeyIdAttribute))
+ }
+
+ kekKeyRingIdAttribute, ok := attributes["kek_key_ring_id"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `kek_key_ring_id is missing from object`)
+
+ return NewEncryptionValueUnknown(), diags
+ }
+
+ kekKeyRingIdVal, ok := kekKeyRingIdAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`kek_key_ring_id expected to be basetypes.StringValue, was: %T`, kekKeyRingIdAttribute))
+ }
+
+ kekKeyVersionAttribute, ok := attributes["kek_key_version"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `kek_key_version is missing from object`)
+
+ return NewEncryptionValueUnknown(), diags
+ }
+
+ kekKeyVersionVal, ok := kekKeyVersionAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`kek_key_version expected to be basetypes.StringValue, was: %T`, kekKeyVersionAttribute))
+ }
+
+ serviceAccountAttribute, ok := attributes["service_account"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `service_account is missing from object`)
+
+ return NewEncryptionValueUnknown(), diags
+ }
+
+ serviceAccountVal, ok := serviceAccountAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`service_account expected to be basetypes.StringValue, was: %T`, serviceAccountAttribute))
+ }
+
+ if diags.HasError() {
+ return NewEncryptionValueUnknown(), diags
+ }
+
+ return EncryptionValue{
+ KekKeyId: kekKeyIdVal,
+ KekKeyRingId: kekKeyRingIdVal,
+ KekKeyVersion: kekKeyVersionVal,
+ ServiceAccount: serviceAccountVal,
+ state: attr.ValueStateKnown,
+ }, diags
+}
+
+func NewEncryptionValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) EncryptionValue {
+ object, diags := NewEncryptionValue(attributeTypes, attributes)
+
+ if diags.HasError() {
+ // This could potentially be added to the diag package.
+ diagsStrings := make([]string, 0, len(diags))
+
+ for _, diagnostic := range diags {
+ diagsStrings = append(diagsStrings, fmt.Sprintf(
+ "%s | %s | %s",
+ diagnostic.Severity(),
+ diagnostic.Summary(),
+ diagnostic.Detail()))
+ }
+
+ panic("NewEncryptionValueMust received error(s): " + strings.Join(diagsStrings, "\n"))
+ }
+
+ return object
+}
+
+func (t EncryptionType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) {
+ if in.Type() == nil {
+ return NewEncryptionValueNull(), nil
+ }
+
+ if !in.Type().Equal(t.TerraformType(ctx)) {
+ return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type())
+ }
+
+ if !in.IsKnown() {
+ return NewEncryptionValueUnknown(), nil
+ }
+
+ if in.IsNull() {
+ return NewEncryptionValueNull(), nil
+ }
+
+ attributes := map[string]attr.Value{}
+
+ val := map[string]tftypes.Value{}
+
+ err := in.As(&val)
+
+ if err != nil {
+ return nil, err
+ }
+
+ for k, v := range val {
+ a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v)
+
+ if err != nil {
+ return nil, err
+ }
+
+ attributes[k] = a
+ }
+
+ return NewEncryptionValueMust(EncryptionValue{}.AttributeTypes(ctx), attributes), nil
+}
+
+func (t EncryptionType) ValueType(ctx context.Context) attr.Value {
+ return EncryptionValue{}
+}
+
+var _ basetypes.ObjectValuable = EncryptionValue{}
+
+type EncryptionValue struct {
+ KekKeyId basetypes.StringValue `tfsdk:"kek_key_id"`
+ KekKeyRingId basetypes.StringValue `tfsdk:"kek_key_ring_id"`
+ KekKeyVersion basetypes.StringValue `tfsdk:"kek_key_version"`
+ ServiceAccount basetypes.StringValue `tfsdk:"service_account"`
+ state attr.ValueState
+}
+
+func (v EncryptionValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
+ attrTypes := make(map[string]tftypes.Type, 4)
+
+ var val tftypes.Value
+ var err error
+
+ attrTypes["kek_key_id"] = basetypes.StringType{}.TerraformType(ctx)
+ attrTypes["kek_key_ring_id"] = basetypes.StringType{}.TerraformType(ctx)
+ attrTypes["kek_key_version"] = basetypes.StringType{}.TerraformType(ctx)
+ attrTypes["service_account"] = basetypes.StringType{}.TerraformType(ctx)
+
+ objectType := tftypes.Object{AttributeTypes: attrTypes}
+
+ switch v.state {
+ case attr.ValueStateKnown:
+ vals := make(map[string]tftypes.Value, 4)
+
+ val, err = v.KekKeyId.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["kek_key_id"] = val
+
+ val, err = v.KekKeyRingId.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["kek_key_ring_id"] = val
+
+ val, err = v.KekKeyVersion.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["kek_key_version"] = val
+
+ val, err = v.ServiceAccount.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["service_account"] = val
+
+ if err := tftypes.ValidateValue(objectType, vals); err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ return tftypes.NewValue(objectType, vals), nil
+ case attr.ValueStateNull:
+ return tftypes.NewValue(objectType, nil), nil
+ case attr.ValueStateUnknown:
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), nil
+ default:
+ panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state))
+ }
+}
+
+func (v EncryptionValue) IsNull() bool {
+ return v.state == attr.ValueStateNull
+}
+
+func (v EncryptionValue) IsUnknown() bool {
+ return v.state == attr.ValueStateUnknown
+}
+
+func (v EncryptionValue) String() string {
+ return "EncryptionValue"
+}
+
+func (v EncryptionValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ attributeTypes := map[string]attr.Type{
+ "kek_key_id": basetypes.StringType{},
+ "kek_key_ring_id": basetypes.StringType{},
+ "kek_key_version": basetypes.StringType{},
+ "service_account": basetypes.StringType{},
+ }
+
+ if v.IsNull() {
+ return types.ObjectNull(attributeTypes), diags
+ }
+
+ if v.IsUnknown() {
+ return types.ObjectUnknown(attributeTypes), diags
+ }
+
+ objVal, diags := types.ObjectValue(
+ attributeTypes,
+ map[string]attr.Value{
+ "kek_key_id": v.KekKeyId,
+ "kek_key_ring_id": v.KekKeyRingId,
+ "kek_key_version": v.KekKeyVersion,
+ "service_account": v.ServiceAccount,
+ })
+
+ return objVal, diags
+}
+
+func (v EncryptionValue) Equal(o attr.Value) bool {
+ other, ok := o.(EncryptionValue)
+
+ if !ok {
+ return false
+ }
+
+ if v.state != other.state {
+ return false
+ }
+
+ if v.state != attr.ValueStateKnown {
+ return true
+ }
+
+ if !v.KekKeyId.Equal(other.KekKeyId) {
+ return false
+ }
+
+ if !v.KekKeyRingId.Equal(other.KekKeyRingId) {
+ return false
+ }
+
+ if !v.KekKeyVersion.Equal(other.KekKeyVersion) {
+ return false
+ }
+
+ if !v.ServiceAccount.Equal(other.ServiceAccount) {
+ return false
+ }
+
+ return true
+}
+
+func (v EncryptionValue) Type(ctx context.Context) attr.Type {
+ return EncryptionType{
+ basetypes.ObjectType{
+ AttrTypes: v.AttributeTypes(ctx),
+ },
+ }
+}
+
+func (v EncryptionValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
+ return map[string]attr.Type{
+ "kek_key_id": basetypes.StringType{},
+ "kek_key_ring_id": basetypes.StringType{},
+ "kek_key_version": basetypes.StringType{},
+ "service_account": basetypes.StringType{},
+ }
+}
+
+var _ basetypes.ObjectTypable = NetworkType{}
+
+type NetworkType struct {
+ basetypes.ObjectType
+}
+
+func (t NetworkType) Equal(o attr.Type) bool {
+ other, ok := o.(NetworkType)
+
+ if !ok {
+ return false
+ }
+
+ return t.ObjectType.Equal(other.ObjectType)
+}
+
+func (t NetworkType) String() string {
+ return "NetworkType"
+}
+
+func (t NetworkType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ attributes := in.Attributes()
+
+ accessScopeAttribute, ok := attributes["access_scope"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `access_scope is missing from object`)
+
+ return nil, diags
+ }
+
+ accessScopeVal, ok := accessScopeAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`access_scope expected to be basetypes.StringValue, was: %T`, accessScopeAttribute))
+ }
+
+ aclAttribute, ok := attributes["acl"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `acl is missing from object`)
+
+ return nil, diags
+ }
+
+ aclVal, ok := aclAttribute.(basetypes.ListValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`acl expected to be basetypes.ListValue, was: %T`, aclAttribute))
+ }
+
+ instanceAddressAttribute, ok := attributes["instance_address"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `instance_address is missing from object`)
+
+ return nil, diags
+ }
+
+ instanceAddressVal, ok := instanceAddressAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`instance_address expected to be basetypes.StringValue, was: %T`, instanceAddressAttribute))
+ }
+
+ routerAddressAttribute, ok := attributes["router_address"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `router_address is missing from object`)
+
+ return nil, diags
+ }
+
+ routerAddressVal, ok := routerAddressAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`router_address expected to be basetypes.StringValue, was: %T`, routerAddressAttribute))
+ }
+
+ if diags.HasError() {
+ return nil, diags
+ }
+
+ return NetworkValue{
+ AccessScope: accessScopeVal,
+ Acl: aclVal,
+ InstanceAddress: instanceAddressVal,
+ RouterAddress: routerAddressVal,
+ state: attr.ValueStateKnown,
+ }, diags
+}
+
+func NewNetworkValueNull() NetworkValue {
+ return NetworkValue{
+ state: attr.ValueStateNull,
+ }
+}
+
+func NewNetworkValueUnknown() NetworkValue {
+ return NetworkValue{
+ state: attr.ValueStateUnknown,
+ }
+}
+
+func NewNetworkValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (NetworkValue, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521
+ ctx := context.Background()
+
+ for name, attributeType := range attributeTypes {
+ attribute, ok := attributes[name]
+
+ if !ok {
+ diags.AddError(
+ "Missing NetworkValue Attribute Value",
+ "While creating a NetworkValue value, a missing attribute value was detected. "+
+ "A NetworkValue must contain values for all attributes, even if null or unknown. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("NetworkValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()),
+ )
+
+ continue
+ }
+
+ if !attributeType.Equal(attribute.Type(ctx)) {
+ diags.AddError(
+ "Invalid NetworkValue Attribute Type",
+ "While creating a NetworkValue value, an invalid attribute value was detected. "+
+ "A NetworkValue must use a matching attribute type for the value. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("NetworkValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+
+ fmt.Sprintf("NetworkValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)),
+ )
+ }
+ }
+
+ for name := range attributes {
+ _, ok := attributeTypes[name]
+
+ if !ok {
+ diags.AddError(
+ "Extra NetworkValue Attribute Value",
+ "While creating a NetworkValue value, an extra attribute value was detected. "+
+ "A NetworkValue must not contain values beyond the expected attribute types. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("Extra NetworkValue Attribute Name: %s", name),
+ )
+ }
+ }
+
+ if diags.HasError() {
+ return NewNetworkValueUnknown(), diags
+ }
+
+ accessScopeAttribute, ok := attributes["access_scope"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `access_scope is missing from object`)
+
+ return NewNetworkValueUnknown(), diags
+ }
+
+ accessScopeVal, ok := accessScopeAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`access_scope expected to be basetypes.StringValue, was: %T`, accessScopeAttribute))
+ }
+
+ aclAttribute, ok := attributes["acl"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `acl is missing from object`)
+
+ return NewNetworkValueUnknown(), diags
+ }
+
+ aclVal, ok := aclAttribute.(basetypes.ListValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`acl expected to be basetypes.ListValue, was: %T`, aclAttribute))
+ }
+
+ instanceAddressAttribute, ok := attributes["instance_address"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `instance_address is missing from object`)
+
+ return NewNetworkValueUnknown(), diags
+ }
+
+ instanceAddressVal, ok := instanceAddressAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`instance_address expected to be basetypes.StringValue, was: %T`, instanceAddressAttribute))
+ }
+
+ routerAddressAttribute, ok := attributes["router_address"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `router_address is missing from object`)
+
+ return NewNetworkValueUnknown(), diags
+ }
+
+ routerAddressVal, ok := routerAddressAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`router_address expected to be basetypes.StringValue, was: %T`, routerAddressAttribute))
+ }
+
+ if diags.HasError() {
+ return NewNetworkValueUnknown(), diags
+ }
+
+ return NetworkValue{
+ AccessScope: accessScopeVal,
+ Acl: aclVal,
+ InstanceAddress: instanceAddressVal,
+ RouterAddress: routerAddressVal,
+ state: attr.ValueStateKnown,
+ }, diags
+}
+
+func NewNetworkValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) NetworkValue {
+ object, diags := NewNetworkValue(attributeTypes, attributes)
+
+ if diags.HasError() {
+ // This could potentially be added to the diag package.
+ diagsStrings := make([]string, 0, len(diags))
+
+ for _, diagnostic := range diags {
+ diagsStrings = append(diagsStrings, fmt.Sprintf(
+ "%s | %s | %s",
+ diagnostic.Severity(),
+ diagnostic.Summary(),
+ diagnostic.Detail()))
+ }
+
+ panic("NewNetworkValueMust received error(s): " + strings.Join(diagsStrings, "\n"))
+ }
+
+ return object
+}
+
+func (t NetworkType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) {
+ if in.Type() == nil {
+ return NewNetworkValueNull(), nil
+ }
+
+ if !in.Type().Equal(t.TerraformType(ctx)) {
+ return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type())
+ }
+
+ if !in.IsKnown() {
+ return NewNetworkValueUnknown(), nil
+ }
+
+ if in.IsNull() {
+ return NewNetworkValueNull(), nil
+ }
+
+ attributes := map[string]attr.Value{}
+
+ val := map[string]tftypes.Value{}
+
+ err := in.As(&val)
+
+ if err != nil {
+ return nil, err
+ }
+
+ for k, v := range val {
+ a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v)
+
+ if err != nil {
+ return nil, err
+ }
+
+ attributes[k] = a
+ }
+
+ return NewNetworkValueMust(NetworkValue{}.AttributeTypes(ctx), attributes), nil
+}
+
+func (t NetworkType) ValueType(ctx context.Context) attr.Value {
+ return NetworkValue{}
+}
+
+var _ basetypes.ObjectValuable = NetworkValue{}
+
+type NetworkValue struct {
+ AccessScope basetypes.StringValue `tfsdk:"access_scope"`
+ Acl basetypes.ListValue `tfsdk:"acl"`
+ InstanceAddress basetypes.StringValue `tfsdk:"instance_address"`
+ RouterAddress basetypes.StringValue `tfsdk:"router_address"`
+ state attr.ValueState
+}
+
+func (v NetworkValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
+ attrTypes := make(map[string]tftypes.Type, 4)
+
+ var val tftypes.Value
+ var err error
+
+ attrTypes["access_scope"] = basetypes.StringType{}.TerraformType(ctx)
+ attrTypes["acl"] = basetypes.ListType{
+ ElemType: types.StringType,
+ }.TerraformType(ctx)
+ attrTypes["instance_address"] = basetypes.StringType{}.TerraformType(ctx)
+ attrTypes["router_address"] = basetypes.StringType{}.TerraformType(ctx)
+
+ objectType := tftypes.Object{AttributeTypes: attrTypes}
+
+ switch v.state {
+ case attr.ValueStateKnown:
+ vals := make(map[string]tftypes.Value, 4)
+
+ val, err = v.AccessScope.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["access_scope"] = val
+
+ val, err = v.Acl.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["acl"] = val
+
+ val, err = v.InstanceAddress.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["instance_address"] = val
+
+ val, err = v.RouterAddress.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["router_address"] = val
+
+ if err := tftypes.ValidateValue(objectType, vals); err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ return tftypes.NewValue(objectType, vals), nil
+ case attr.ValueStateNull:
+ return tftypes.NewValue(objectType, nil), nil
+ case attr.ValueStateUnknown:
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), nil
+ default:
+ panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state))
+ }
+}
+
+func (v NetworkValue) IsNull() bool {
+ return v.state == attr.ValueStateNull
+}
+
+func (v NetworkValue) IsUnknown() bool {
+ return v.state == attr.ValueStateUnknown
+}
+
+func (v NetworkValue) String() string {
+ return "NetworkValue"
+}
+
+func (v NetworkValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ var aclVal basetypes.ListValue
+ switch {
+ case v.Acl.IsUnknown():
+ aclVal = types.ListUnknown(types.StringType)
+ case v.Acl.IsNull():
+ aclVal = types.ListNull(types.StringType)
+ default:
+ var d diag.Diagnostics
+ aclVal, d = types.ListValue(types.StringType, v.Acl.Elements())
+ diags.Append(d...)
+ }
+
+ if diags.HasError() {
+ return types.ObjectUnknown(map[string]attr.Type{
+ "access_scope": basetypes.StringType{},
+ "acl": basetypes.ListType{
+ ElemType: types.StringType,
+ },
+ "instance_address": basetypes.StringType{},
+ "router_address": basetypes.StringType{},
+ }), diags
+ }
+
+ attributeTypes := map[string]attr.Type{
+ "access_scope": basetypes.StringType{},
+ "acl": basetypes.ListType{
+ ElemType: types.StringType,
+ },
+ "instance_address": basetypes.StringType{},
+ "router_address": basetypes.StringType{},
+ }
+
+ if v.IsNull() {
+ return types.ObjectNull(attributeTypes), diags
+ }
+
+ if v.IsUnknown() {
+ return types.ObjectUnknown(attributeTypes), diags
+ }
+
+ objVal, diags := types.ObjectValue(
+ attributeTypes,
+ map[string]attr.Value{
+ "access_scope": v.AccessScope,
+ "acl": aclVal,
+ "instance_address": v.InstanceAddress,
+ "router_address": v.RouterAddress,
+ })
+
+ return objVal, diags
+}
+
+func (v NetworkValue) Equal(o attr.Value) bool {
+ other, ok := o.(NetworkValue)
+
+ if !ok {
+ return false
+ }
+
+ if v.state != other.state {
+ return false
+ }
+
+ if v.state != attr.ValueStateKnown {
+ return true
+ }
+
+ if !v.AccessScope.Equal(other.AccessScope) {
+ return false
+ }
+
+ if !v.Acl.Equal(other.Acl) {
+ return false
+ }
+
+ if !v.InstanceAddress.Equal(other.InstanceAddress) {
+ return false
+ }
+
+ if !v.RouterAddress.Equal(other.RouterAddress) {
+ return false
+ }
+
+ return true
+}
+
+func (v NetworkValue) Type(ctx context.Context) attr.Type {
+ return NetworkType{
+ basetypes.ObjectType{
+ AttrTypes: v.AttributeTypes(ctx),
+ },
+ }
+}
+
+func (v NetworkValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
+ return map[string]attr.Type{
+ "access_scope": basetypes.StringType{},
+ "acl": basetypes.ListType{
+ ElemType: types.StringType,
+ },
+ "instance_address": basetypes.StringType{},
+ "router_address": basetypes.StringType{},
+ }
+}
+
+var _ basetypes.ObjectTypable = StorageType{}
+
+type StorageType struct {
+ basetypes.ObjectType
+}
+
+func (t StorageType) Equal(o attr.Type) bool {
+ other, ok := o.(StorageType)
+
+ if !ok {
+ return false
+ }
+
+ return t.ObjectType.Equal(other.ObjectType)
+}
+
+func (t StorageType) String() string {
+ return "StorageType"
+}
+
+func (t StorageType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ attributes := in.Attributes()
+
+ classAttribute, ok := attributes["class"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `class is missing from object`)
+
+ return nil, diags
+ }
+
+ classVal, ok := classAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`class expected to be basetypes.StringValue, was: %T`, classAttribute))
+ }
+
+ sizeAttribute, ok := attributes["size"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `size is missing from object`)
+
+ return nil, diags
+ }
+
+ sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
+ }
+
+ if diags.HasError() {
+ return nil, diags
+ }
+
+ return StorageValue{
+ Class: classVal,
+ Size: sizeVal,
+ state: attr.ValueStateKnown,
+ }, diags
+}
+
+func NewStorageValueNull() StorageValue {
+ return StorageValue{
+ state: attr.ValueStateNull,
+ }
+}
+
+func NewStorageValueUnknown() StorageValue {
+ return StorageValue{
+ state: attr.ValueStateUnknown,
+ }
+}
+
+func NewStorageValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (StorageValue, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521
+ ctx := context.Background()
+
+ for name, attributeType := range attributeTypes {
+ attribute, ok := attributes[name]
+
+ if !ok {
+ diags.AddError(
+ "Missing StorageValue Attribute Value",
+ "While creating a StorageValue value, a missing attribute value was detected. "+
+ "A StorageValue must contain values for all attributes, even if null or unknown. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("StorageValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()),
+ )
+
+ continue
+ }
+
+ if !attributeType.Equal(attribute.Type(ctx)) {
+ diags.AddError(
+ "Invalid StorageValue Attribute Type",
+ "While creating a StorageValue value, an invalid attribute value was detected. "+
+ "A StorageValue must use a matching attribute type for the value. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("StorageValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+
+ fmt.Sprintf("StorageValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)),
+ )
+ }
+ }
+
+ for name := range attributes {
+ _, ok := attributeTypes[name]
+
+ if !ok {
+ diags.AddError(
+ "Extra StorageValue Attribute Value",
+ "While creating a StorageValue value, an extra attribute value was detected. "+
+ "A StorageValue must not contain values beyond the expected attribute types. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("Extra StorageValue Attribute Name: %s", name),
+ )
+ }
+ }
+
+ if diags.HasError() {
+ return NewStorageValueUnknown(), diags
+ }
+
+ classAttribute, ok := attributes["class"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `class is missing from object`)
+
+ return NewStorageValueUnknown(), diags
+ }
+
+ classVal, ok := classAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`class expected to be basetypes.StringValue, was: %T`, classAttribute))
+ }
+
+ sizeAttribute, ok := attributes["size"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `size is missing from object`)
+
+ return NewStorageValueUnknown(), diags
+ }
+
+ sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
+ }
+
+ if diags.HasError() {
+ return NewStorageValueUnknown(), diags
+ }
+
+ return StorageValue{
+ Class: classVal,
+ Size: sizeVal,
+ state: attr.ValueStateKnown,
+ }, diags
+}
+
+func NewStorageValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) StorageValue {
+ object, diags := NewStorageValue(attributeTypes, attributes)
+
+ if diags.HasError() {
+ // This could potentially be added to the diag package.
+ diagsStrings := make([]string, 0, len(diags))
+
+ for _, diagnostic := range diags {
+ diagsStrings = append(diagsStrings, fmt.Sprintf(
+ "%s | %s | %s",
+ diagnostic.Severity(),
+ diagnostic.Summary(),
+ diagnostic.Detail()))
+ }
+
+ panic("NewStorageValueMust received error(s): " + strings.Join(diagsStrings, "\n"))
+ }
+
+ return object
+}
+
+func (t StorageType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) {
+ if in.Type() == nil {
+ return NewStorageValueNull(), nil
+ }
+
+ if !in.Type().Equal(t.TerraformType(ctx)) {
+ return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type())
+ }
+
+ if !in.IsKnown() {
+ return NewStorageValueUnknown(), nil
+ }
+
+ if in.IsNull() {
+ return NewStorageValueNull(), nil
+ }
+
+ attributes := map[string]attr.Value{}
+
+ val := map[string]tftypes.Value{}
+
+ err := in.As(&val)
+
+ if err != nil {
+ return nil, err
+ }
+
+ for k, v := range val {
+ a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v)
+
+ if err != nil {
+ return nil, err
+ }
+
+ attributes[k] = a
+ }
+
+ return NewStorageValueMust(StorageValue{}.AttributeTypes(ctx), attributes), nil
+}
+
+func (t StorageType) ValueType(ctx context.Context) attr.Value {
+ return StorageValue{}
+}
+
+var _ basetypes.ObjectValuable = StorageValue{}
+
+type StorageValue struct {
+ Class basetypes.StringValue `tfsdk:"class"`
+ Size basetypes.Int64Value `tfsdk:"size"`
+ state attr.ValueState
+}
+
+func (v StorageValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
+ attrTypes := make(map[string]tftypes.Type, 2)
+
+ var val tftypes.Value
+ var err error
+
+ attrTypes["class"] = basetypes.StringType{}.TerraformType(ctx)
+ attrTypes["size"] = basetypes.Int64Type{}.TerraformType(ctx)
+
+ objectType := tftypes.Object{AttributeTypes: attrTypes}
+
+ switch v.state {
+ case attr.ValueStateKnown:
+ vals := make(map[string]tftypes.Value, 2)
+
+ val, err = v.Class.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["class"] = val
+
+ val, err = v.Size.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["size"] = val
+
+ if err := tftypes.ValidateValue(objectType, vals); err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ return tftypes.NewValue(objectType, vals), nil
+ case attr.ValueStateNull:
+ return tftypes.NewValue(objectType, nil), nil
+ case attr.ValueStateUnknown:
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), nil
+ default:
+ panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state))
+ }
+}
+
+func (v StorageValue) IsNull() bool {
+ return v.state == attr.ValueStateNull
+}
+
+func (v StorageValue) IsUnknown() bool {
+ return v.state == attr.ValueStateUnknown
+}
+
+func (v StorageValue) String() string {
+ return "StorageValue"
+}
+
+func (v StorageValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ attributeTypes := map[string]attr.Type{
+ "class": basetypes.StringType{},
+ "size": basetypes.Int64Type{},
+ }
+
+ if v.IsNull() {
+ return types.ObjectNull(attributeTypes), diags
+ }
+
+ if v.IsUnknown() {
+ return types.ObjectUnknown(attributeTypes), diags
+ }
+
+ objVal, diags := types.ObjectValue(
+ attributeTypes,
+ map[string]attr.Value{
+ "class": v.Class,
+ "size": v.Size,
+ })
+
+ return objVal, diags
+}
+
+func (v StorageValue) Equal(o attr.Value) bool {
+ other, ok := o.(StorageValue)
+
+ if !ok {
+ return false
+ }
+
+ if v.state != other.state {
+ return false
+ }
+
+ if v.state != attr.ValueStateKnown {
+ return true
+ }
+
+ if !v.Class.Equal(other.Class) {
+ return false
+ }
+
+ if !v.Size.Equal(other.Size) {
+ return false
+ }
+
+ return true
+}
+
+func (v StorageValue) Type(ctx context.Context) attr.Type {
+ return StorageType{
+ basetypes.ObjectType{
+ AttrTypes: v.AttributeTypes(ctx),
+ },
+ }
+}
+
+func (v StorageValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
+ return map[string]attr.Type{
+ "class": basetypes.StringType{},
+ "size": basetypes.Int64Type{},
+ }
+}
diff --git a/stackit/internal/services/sqlserverflexbeta/instance/datasources_gen/instances_data_source_gen.go b/stackit/internal/services/sqlserverflexbeta/instance/datasources_gen/instances_data_source_gen.go
new file mode 100644
index 00000000..04fff1f6
--- /dev/null
+++ b/stackit/internal/services/sqlserverflexbeta/instance/datasources_gen/instances_data_source_gen.go
@@ -0,0 +1,1172 @@
+// Code generated by terraform-plugin-framework-generator DO NOT EDIT.
+
+package sqlserverflexbeta
+
+import (
+ "context"
+ "fmt"
+ "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ "github.com/hashicorp/terraform-plugin-framework/attr"
+ "github.com/hashicorp/terraform-plugin-framework/diag"
+ "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+ "github.com/hashicorp/terraform-plugin-framework/types/basetypes"
+ "github.com/hashicorp/terraform-plugin-go/tftypes"
+ "strings"
+
+ "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
+)
+
+func InstancesDataSourceSchema(ctx context.Context) schema.Schema {
+ return schema.Schema{
+ Attributes: map[string]schema.Attribute{
+ "instances": schema.ListNestedAttribute{
+ NestedObject: schema.NestedAttributeObject{
+ Attributes: map[string]schema.Attribute{
+ "id": schema.StringAttribute{
+ Computed: true,
+ Description: "The ID of the instance.",
+ MarkdownDescription: "The ID of the instance.",
+ },
+ "is_deletable": schema.BoolAttribute{
+ Computed: true,
+ Description: "Whether the instance can be deleted or not.",
+ MarkdownDescription: "Whether the instance can be deleted or not.",
+ },
+ "name": schema.StringAttribute{
+ Computed: true,
+ Description: "The name of the instance.",
+ MarkdownDescription: "The name of the instance.",
+ },
+ "status": schema.StringAttribute{
+ Computed: true,
+ },
+ },
+ CustomType: InstancesType{
+ ObjectType: types.ObjectType{
+ AttrTypes: InstancesValue{}.AttributeTypes(ctx),
+ },
+ },
+ },
+ Computed: true,
+ Description: "List of owned instances and their current status.",
+ MarkdownDescription: "List of owned instances and their current status.",
+ },
+ "page": schema.Int64Attribute{
+ Optional: true,
+ Computed: true,
+ Description: "Number of the page of items list to be returned.",
+ MarkdownDescription: "Number of the page of items list to be returned.",
+ },
+ "pagination": schema.SingleNestedAttribute{
+ Attributes: map[string]schema.Attribute{
+ "page": schema.Int64Attribute{
+ Computed: true,
+ },
+ "size": schema.Int64Attribute{
+ Computed: true,
+ },
+ "sort": schema.StringAttribute{
+ Computed: true,
+ },
+ "total_pages": schema.Int64Attribute{
+ Computed: true,
+ },
+ "total_rows": schema.Int64Attribute{
+ Computed: true,
+ },
+ },
+ CustomType: PaginationType{
+ ObjectType: types.ObjectType{
+ AttrTypes: PaginationValue{}.AttributeTypes(ctx),
+ },
+ },
+ Computed: true,
+ },
+ "project_id": schema.StringAttribute{
+ Required: true,
+ Description: "The STACKIT project ID.",
+ MarkdownDescription: "The STACKIT project ID.",
+ },
+ "region": schema.StringAttribute{
+ Required: true,
+ Description: "The region which should be addressed",
+ MarkdownDescription: "The region which should be addressed",
+ Validators: []validator.String{
+ stringvalidator.OneOf(
+ "eu01",
+ ),
+ },
+ },
+ "size": schema.Int64Attribute{
+ Optional: true,
+ Computed: true,
+ Description: "Number of items to be returned on each page.",
+ MarkdownDescription: "Number of items to be returned on each page.",
+ },
+ "sort": schema.StringAttribute{
+ Optional: true,
+ Computed: true,
+ Description: "Sorting of the items to be returned on each page.",
+ MarkdownDescription: "Sorting of the items to be returned on each page.",
+ Validators: []validator.String{
+ stringvalidator.OneOf(
+ "index.desc",
+ "index.asc",
+ "id.desc",
+ "id.asc",
+ "is_deletable.desc",
+ "is_deletable.asc",
+ "name.asc",
+ "name.desc",
+ "status.asc",
+ "status.desc",
+ ),
+ },
+ },
+ },
+ }
+}
+
+type InstancesModel struct {
+ Instances types.List `tfsdk:"instances"`
+ Page types.Int64 `tfsdk:"page"`
+ Pagination PaginationValue `tfsdk:"pagination"`
+ ProjectId types.String `tfsdk:"project_id"`
+ Region types.String `tfsdk:"region"`
+ Size types.Int64 `tfsdk:"size"`
+ Sort types.String `tfsdk:"sort"`
+}
+
+var _ basetypes.ObjectTypable = InstancesType{}
+
+type InstancesType struct {
+ basetypes.ObjectType
+}
+
+func (t InstancesType) Equal(o attr.Type) bool {
+ other, ok := o.(InstancesType)
+
+ if !ok {
+ return false
+ }
+
+ return t.ObjectType.Equal(other.ObjectType)
+}
+
+func (t InstancesType) String() string {
+ return "InstancesType"
+}
+
+func (t InstancesType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ attributes := in.Attributes()
+
+ idAttribute, ok := attributes["id"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `id is missing from object`)
+
+ return nil, diags
+ }
+
+ idVal, ok := idAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`id expected to be basetypes.StringValue, was: %T`, idAttribute))
+ }
+
+ isDeletableAttribute, ok := attributes["is_deletable"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `is_deletable is missing from object`)
+
+ return nil, diags
+ }
+
+ isDeletableVal, ok := isDeletableAttribute.(basetypes.BoolValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`is_deletable expected to be basetypes.BoolValue, was: %T`, isDeletableAttribute))
+ }
+
+ nameAttribute, ok := attributes["name"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `name is missing from object`)
+
+ return nil, diags
+ }
+
+ nameVal, ok := nameAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`name expected to be basetypes.StringValue, was: %T`, nameAttribute))
+ }
+
+ statusAttribute, ok := attributes["status"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `status is missing from object`)
+
+ return nil, diags
+ }
+
+ statusVal, ok := statusAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`status expected to be basetypes.StringValue, was: %T`, statusAttribute))
+ }
+
+ if diags.HasError() {
+ return nil, diags
+ }
+
+ return InstancesValue{
+ Id: idVal,
+ IsDeletable: isDeletableVal,
+ Name: nameVal,
+ Status: statusVal,
+ state: attr.ValueStateKnown,
+ }, diags
+}
+
+func NewInstancesValueNull() InstancesValue {
+ return InstancesValue{
+ state: attr.ValueStateNull,
+ }
+}
+
+func NewInstancesValueUnknown() InstancesValue {
+ return InstancesValue{
+ state: attr.ValueStateUnknown,
+ }
+}
+
+func NewInstancesValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (InstancesValue, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521
+ ctx := context.Background()
+
+ for name, attributeType := range attributeTypes {
+ attribute, ok := attributes[name]
+
+ if !ok {
+ diags.AddError(
+ "Missing InstancesValue Attribute Value",
+ "While creating a InstancesValue value, a missing attribute value was detected. "+
+ "A InstancesValue must contain values for all attributes, even if null or unknown. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("InstancesValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()),
+ )
+
+ continue
+ }
+
+ if !attributeType.Equal(attribute.Type(ctx)) {
+ diags.AddError(
+ "Invalid InstancesValue Attribute Type",
+ "While creating a InstancesValue value, an invalid attribute value was detected. "+
+ "A InstancesValue must use a matching attribute type for the value. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("InstancesValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+
+ fmt.Sprintf("InstancesValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)),
+ )
+ }
+ }
+
+ for name := range attributes {
+ _, ok := attributeTypes[name]
+
+ if !ok {
+ diags.AddError(
+ "Extra InstancesValue Attribute Value",
+ "While creating a InstancesValue value, an extra attribute value was detected. "+
+ "A InstancesValue must not contain values beyond the expected attribute types. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("Extra InstancesValue Attribute Name: %s", name),
+ )
+ }
+ }
+
+ if diags.HasError() {
+ return NewInstancesValueUnknown(), diags
+ }
+
+ idAttribute, ok := attributes["id"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `id is missing from object`)
+
+ return NewInstancesValueUnknown(), diags
+ }
+
+ idVal, ok := idAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`id expected to be basetypes.StringValue, was: %T`, idAttribute))
+ }
+
+ isDeletableAttribute, ok := attributes["is_deletable"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `is_deletable is missing from object`)
+
+ return NewInstancesValueUnknown(), diags
+ }
+
+ isDeletableVal, ok := isDeletableAttribute.(basetypes.BoolValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`is_deletable expected to be basetypes.BoolValue, was: %T`, isDeletableAttribute))
+ }
+
+ nameAttribute, ok := attributes["name"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `name is missing from object`)
+
+ return NewInstancesValueUnknown(), diags
+ }
+
+ nameVal, ok := nameAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`name expected to be basetypes.StringValue, was: %T`, nameAttribute))
+ }
+
+ statusAttribute, ok := attributes["status"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `status is missing from object`)
+
+ return NewInstancesValueUnknown(), diags
+ }
+
+ statusVal, ok := statusAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`status expected to be basetypes.StringValue, was: %T`, statusAttribute))
+ }
+
+ if diags.HasError() {
+ return NewInstancesValueUnknown(), diags
+ }
+
+ return InstancesValue{
+ Id: idVal,
+ IsDeletable: isDeletableVal,
+ Name: nameVal,
+ Status: statusVal,
+ state: attr.ValueStateKnown,
+ }, diags
+}
+
+func NewInstancesValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) InstancesValue {
+ object, diags := NewInstancesValue(attributeTypes, attributes)
+
+ if diags.HasError() {
+ // This could potentially be added to the diag package.
+ diagsStrings := make([]string, 0, len(diags))
+
+ for _, diagnostic := range diags {
+ diagsStrings = append(diagsStrings, fmt.Sprintf(
+ "%s | %s | %s",
+ diagnostic.Severity(),
+ diagnostic.Summary(),
+ diagnostic.Detail()))
+ }
+
+ panic("NewInstancesValueMust received error(s): " + strings.Join(diagsStrings, "\n"))
+ }
+
+ return object
+}
+
+func (t InstancesType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) {
+ if in.Type() == nil {
+ return NewInstancesValueNull(), nil
+ }
+
+ if !in.Type().Equal(t.TerraformType(ctx)) {
+ return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type())
+ }
+
+ if !in.IsKnown() {
+ return NewInstancesValueUnknown(), nil
+ }
+
+ if in.IsNull() {
+ return NewInstancesValueNull(), nil
+ }
+
+ attributes := map[string]attr.Value{}
+
+ val := map[string]tftypes.Value{}
+
+ err := in.As(&val)
+
+ if err != nil {
+ return nil, err
+ }
+
+ for k, v := range val {
+ a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v)
+
+ if err != nil {
+ return nil, err
+ }
+
+ attributes[k] = a
+ }
+
+ return NewInstancesValueMust(InstancesValue{}.AttributeTypes(ctx), attributes), nil
+}
+
+func (t InstancesType) ValueType(ctx context.Context) attr.Value {
+ return InstancesValue{}
+}
+
+var _ basetypes.ObjectValuable = InstancesValue{}
+
+type InstancesValue struct {
+ Id basetypes.StringValue `tfsdk:"id"`
+ IsDeletable basetypes.BoolValue `tfsdk:"is_deletable"`
+ Name basetypes.StringValue `tfsdk:"name"`
+ Status basetypes.StringValue `tfsdk:"status"`
+ state attr.ValueState
+}
+
+func (v InstancesValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
+ attrTypes := make(map[string]tftypes.Type, 4)
+
+ var val tftypes.Value
+ var err error
+
+ attrTypes["id"] = basetypes.StringType{}.TerraformType(ctx)
+ attrTypes["is_deletable"] = basetypes.BoolType{}.TerraformType(ctx)
+ attrTypes["name"] = basetypes.StringType{}.TerraformType(ctx)
+ attrTypes["status"] = basetypes.StringType{}.TerraformType(ctx)
+
+ objectType := tftypes.Object{AttributeTypes: attrTypes}
+
+ switch v.state {
+ case attr.ValueStateKnown:
+ vals := make(map[string]tftypes.Value, 4)
+
+ val, err = v.Id.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["id"] = val
+
+ val, err = v.IsDeletable.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["is_deletable"] = val
+
+ val, err = v.Name.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["name"] = val
+
+ val, err = v.Status.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["status"] = val
+
+ if err := tftypes.ValidateValue(objectType, vals); err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ return tftypes.NewValue(objectType, vals), nil
+ case attr.ValueStateNull:
+ return tftypes.NewValue(objectType, nil), nil
+ case attr.ValueStateUnknown:
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), nil
+ default:
+ panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state))
+ }
+}
+
+func (v InstancesValue) IsNull() bool {
+ return v.state == attr.ValueStateNull
+}
+
+func (v InstancesValue) IsUnknown() bool {
+ return v.state == attr.ValueStateUnknown
+}
+
+func (v InstancesValue) String() string {
+ return "InstancesValue"
+}
+
+func (v InstancesValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ attributeTypes := map[string]attr.Type{
+ "id": basetypes.StringType{},
+ "is_deletable": basetypes.BoolType{},
+ "name": basetypes.StringType{},
+ "status": basetypes.StringType{},
+ }
+
+ if v.IsNull() {
+ return types.ObjectNull(attributeTypes), diags
+ }
+
+ if v.IsUnknown() {
+ return types.ObjectUnknown(attributeTypes), diags
+ }
+
+ objVal, diags := types.ObjectValue(
+ attributeTypes,
+ map[string]attr.Value{
+ "id": v.Id,
+ "is_deletable": v.IsDeletable,
+ "name": v.Name,
+ "status": v.Status,
+ })
+
+ return objVal, diags
+}
+
+func (v InstancesValue) Equal(o attr.Value) bool {
+ other, ok := o.(InstancesValue)
+
+ if !ok {
+ return false
+ }
+
+ if v.state != other.state {
+ return false
+ }
+
+ if v.state != attr.ValueStateKnown {
+ return true
+ }
+
+ if !v.Id.Equal(other.Id) {
+ return false
+ }
+
+ if !v.IsDeletable.Equal(other.IsDeletable) {
+ return false
+ }
+
+ if !v.Name.Equal(other.Name) {
+ return false
+ }
+
+ if !v.Status.Equal(other.Status) {
+ return false
+ }
+
+ return true
+}
+
+func (v InstancesValue) Type(ctx context.Context) attr.Type {
+ return InstancesType{
+ basetypes.ObjectType{
+ AttrTypes: v.AttributeTypes(ctx),
+ },
+ }
+}
+
+func (v InstancesValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
+ return map[string]attr.Type{
+ "id": basetypes.StringType{},
+ "is_deletable": basetypes.BoolType{},
+ "name": basetypes.StringType{},
+ "status": basetypes.StringType{},
+ }
+}
+
+var _ basetypes.ObjectTypable = PaginationType{}
+
+type PaginationType struct {
+ basetypes.ObjectType
+}
+
+func (t PaginationType) Equal(o attr.Type) bool {
+ other, ok := o.(PaginationType)
+
+ if !ok {
+ return false
+ }
+
+ return t.ObjectType.Equal(other.ObjectType)
+}
+
+func (t PaginationType) String() string {
+ return "PaginationType"
+}
+
+func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ attributes := in.Attributes()
+
+ pageAttribute, ok := attributes["page"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `page is missing from object`)
+
+ return nil, diags
+ }
+
+ pageVal, ok := pageAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute))
+ }
+
+ sizeAttribute, ok := attributes["size"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `size is missing from object`)
+
+ return nil, diags
+ }
+
+ sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
+ }
+
+ sortAttribute, ok := attributes["sort"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `sort is missing from object`)
+
+ return nil, diags
+ }
+
+ sortVal, ok := sortAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`sort expected to be basetypes.StringValue, was: %T`, sortAttribute))
+ }
+
+ totalPagesAttribute, ok := attributes["total_pages"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `total_pages is missing from object`)
+
+ return nil, diags
+ }
+
+ totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute))
+ }
+
+ totalRowsAttribute, ok := attributes["total_rows"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `total_rows is missing from object`)
+
+ return nil, diags
+ }
+
+ totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute))
+ }
+
+ if diags.HasError() {
+ return nil, diags
+ }
+
+ return PaginationValue{
+ Page: pageVal,
+ Size: sizeVal,
+ Sort: sortVal,
+ TotalPages: totalPagesVal,
+ TotalRows: totalRowsVal,
+ state: attr.ValueStateKnown,
+ }, diags
+}
+
+func NewPaginationValueNull() PaginationValue {
+ return PaginationValue{
+ state: attr.ValueStateNull,
+ }
+}
+
+func NewPaginationValueUnknown() PaginationValue {
+ return PaginationValue{
+ state: attr.ValueStateUnknown,
+ }
+}
+
+func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (PaginationValue, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521
+ ctx := context.Background()
+
+ for name, attributeType := range attributeTypes {
+ attribute, ok := attributes[name]
+
+ if !ok {
+ diags.AddError(
+ "Missing PaginationValue Attribute Value",
+ "While creating a PaginationValue value, a missing attribute value was detected. "+
+ "A PaginationValue must contain values for all attributes, even if null or unknown. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("PaginationValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()),
+ )
+
+ continue
+ }
+
+ if !attributeType.Equal(attribute.Type(ctx)) {
+ diags.AddError(
+ "Invalid PaginationValue Attribute Type",
+ "While creating a PaginationValue value, an invalid attribute value was detected. "+
+ "A PaginationValue must use a matching attribute type for the value. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("PaginationValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+
+ fmt.Sprintf("PaginationValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)),
+ )
+ }
+ }
+
+ for name := range attributes {
+ _, ok := attributeTypes[name]
+
+ if !ok {
+ diags.AddError(
+ "Extra PaginationValue Attribute Value",
+ "While creating a PaginationValue value, an extra attribute value was detected. "+
+ "A PaginationValue must not contain values beyond the expected attribute types. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("Extra PaginationValue Attribute Name: %s", name),
+ )
+ }
+ }
+
+ if diags.HasError() {
+ return NewPaginationValueUnknown(), diags
+ }
+
+ pageAttribute, ok := attributes["page"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `page is missing from object`)
+
+ return NewPaginationValueUnknown(), diags
+ }
+
+ pageVal, ok := pageAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute))
+ }
+
+ sizeAttribute, ok := attributes["size"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `size is missing from object`)
+
+ return NewPaginationValueUnknown(), diags
+ }
+
+ sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
+ }
+
+ sortAttribute, ok := attributes["sort"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `sort is missing from object`)
+
+ return NewPaginationValueUnknown(), diags
+ }
+
+ sortVal, ok := sortAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`sort expected to be basetypes.StringValue, was: %T`, sortAttribute))
+ }
+
+ totalPagesAttribute, ok := attributes["total_pages"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `total_pages is missing from object`)
+
+ return NewPaginationValueUnknown(), diags
+ }
+
+ totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute))
+ }
+
+ totalRowsAttribute, ok := attributes["total_rows"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `total_rows is missing from object`)
+
+ return NewPaginationValueUnknown(), diags
+ }
+
+ totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute))
+ }
+
+ if diags.HasError() {
+ return NewPaginationValueUnknown(), diags
+ }
+
+ return PaginationValue{
+ Page: pageVal,
+ Size: sizeVal,
+ Sort: sortVal,
+ TotalPages: totalPagesVal,
+ TotalRows: totalRowsVal,
+ state: attr.ValueStateKnown,
+ }, diags
+}
+
+func NewPaginationValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) PaginationValue {
+ object, diags := NewPaginationValue(attributeTypes, attributes)
+
+ if diags.HasError() {
+ // This could potentially be added to the diag package.
+ diagsStrings := make([]string, 0, len(diags))
+
+ for _, diagnostic := range diags {
+ diagsStrings = append(diagsStrings, fmt.Sprintf(
+ "%s | %s | %s",
+ diagnostic.Severity(),
+ diagnostic.Summary(),
+ diagnostic.Detail()))
+ }
+
+ panic("NewPaginationValueMust received error(s): " + strings.Join(diagsStrings, "\n"))
+ }
+
+ return object
+}
+
+func (t PaginationType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) {
+ if in.Type() == nil {
+ return NewPaginationValueNull(), nil
+ }
+
+ if !in.Type().Equal(t.TerraformType(ctx)) {
+ return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type())
+ }
+
+ if !in.IsKnown() {
+ return NewPaginationValueUnknown(), nil
+ }
+
+ if in.IsNull() {
+ return NewPaginationValueNull(), nil
+ }
+
+ attributes := map[string]attr.Value{}
+
+ val := map[string]tftypes.Value{}
+
+ err := in.As(&val)
+
+ if err != nil {
+ return nil, err
+ }
+
+ for k, v := range val {
+ a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v)
+
+ if err != nil {
+ return nil, err
+ }
+
+ attributes[k] = a
+ }
+
+ return NewPaginationValueMust(PaginationValue{}.AttributeTypes(ctx), attributes), nil
+}
+
+func (t PaginationType) ValueType(ctx context.Context) attr.Value {
+ return PaginationValue{}
+}
+
+var _ basetypes.ObjectValuable = PaginationValue{}
+
+type PaginationValue struct {
+ Page basetypes.Int64Value `tfsdk:"page"`
+ Size basetypes.Int64Value `tfsdk:"size"`
+ Sort basetypes.StringValue `tfsdk:"sort"`
+ TotalPages basetypes.Int64Value `tfsdk:"total_pages"`
+ TotalRows basetypes.Int64Value `tfsdk:"total_rows"`
+ state attr.ValueState
+}
+
+func (v PaginationValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
+ attrTypes := make(map[string]tftypes.Type, 5)
+
+ var val tftypes.Value
+ var err error
+
+ attrTypes["page"] = basetypes.Int64Type{}.TerraformType(ctx)
+ attrTypes["size"] = basetypes.Int64Type{}.TerraformType(ctx)
+ attrTypes["sort"] = basetypes.StringType{}.TerraformType(ctx)
+ attrTypes["total_pages"] = basetypes.Int64Type{}.TerraformType(ctx)
+ attrTypes["total_rows"] = basetypes.Int64Type{}.TerraformType(ctx)
+
+ objectType := tftypes.Object{AttributeTypes: attrTypes}
+
+ switch v.state {
+ case attr.ValueStateKnown:
+ vals := make(map[string]tftypes.Value, 5)
+
+ val, err = v.Page.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["page"] = val
+
+ val, err = v.Size.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["size"] = val
+
+ val, err = v.Sort.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["sort"] = val
+
+ val, err = v.TotalPages.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["total_pages"] = val
+
+ val, err = v.TotalRows.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["total_rows"] = val
+
+ if err := tftypes.ValidateValue(objectType, vals); err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ return tftypes.NewValue(objectType, vals), nil
+ case attr.ValueStateNull:
+ return tftypes.NewValue(objectType, nil), nil
+ case attr.ValueStateUnknown:
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), nil
+ default:
+ panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state))
+ }
+}
+
+func (v PaginationValue) IsNull() bool {
+ return v.state == attr.ValueStateNull
+}
+
+func (v PaginationValue) IsUnknown() bool {
+ return v.state == attr.ValueStateUnknown
+}
+
+func (v PaginationValue) String() string {
+ return "PaginationValue"
+}
+
+func (v PaginationValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ attributeTypes := map[string]attr.Type{
+ "page": basetypes.Int64Type{},
+ "size": basetypes.Int64Type{},
+ "sort": basetypes.StringType{},
+ "total_pages": basetypes.Int64Type{},
+ "total_rows": basetypes.Int64Type{},
+ }
+
+ if v.IsNull() {
+ return types.ObjectNull(attributeTypes), diags
+ }
+
+ if v.IsUnknown() {
+ return types.ObjectUnknown(attributeTypes), diags
+ }
+
+ objVal, diags := types.ObjectValue(
+ attributeTypes,
+ map[string]attr.Value{
+ "page": v.Page,
+ "size": v.Size,
+ "sort": v.Sort,
+ "total_pages": v.TotalPages,
+ "total_rows": v.TotalRows,
+ })
+
+ return objVal, diags
+}
+
+func (v PaginationValue) Equal(o attr.Value) bool {
+ other, ok := o.(PaginationValue)
+
+ if !ok {
+ return false
+ }
+
+ if v.state != other.state {
+ return false
+ }
+
+ if v.state != attr.ValueStateKnown {
+ return true
+ }
+
+ if !v.Page.Equal(other.Page) {
+ return false
+ }
+
+ if !v.Size.Equal(other.Size) {
+ return false
+ }
+
+ if !v.Sort.Equal(other.Sort) {
+ return false
+ }
+
+ if !v.TotalPages.Equal(other.TotalPages) {
+ return false
+ }
+
+ if !v.TotalRows.Equal(other.TotalRows) {
+ return false
+ }
+
+ return true
+}
+
+func (v PaginationValue) Type(ctx context.Context) attr.Type {
+ return PaginationType{
+ basetypes.ObjectType{
+ AttrTypes: v.AttributeTypes(ctx),
+ },
+ }
+}
+
+func (v PaginationValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
+ return map[string]attr.Type{
+ "page": basetypes.Int64Type{},
+ "size": basetypes.Int64Type{},
+ "sort": basetypes.StringType{},
+ "total_pages": basetypes.Int64Type{},
+ "total_rows": basetypes.Int64Type{},
+ }
+}
diff --git a/stackit/internal/services/sqlserverflexbeta/instance/functions.go b/stackit/internal/services/sqlserverflexbeta/instance/functions.go
new file mode 100644
index 00000000..d66f358e
--- /dev/null
+++ b/stackit/internal/services/sqlserverflexbeta/instance/functions.go
@@ -0,0 +1,306 @@
+package sqlserverflexbeta
+
+import (
+ "context"
+ "fmt"
+ "math"
+
+ "github.com/hashicorp/terraform-plugin-framework/attr"
+ "github.com/hashicorp/terraform-plugin-framework/diag"
+ "github.com/hashicorp/terraform-plugin-framework/resource"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
+ sqlserverflexbetaDataGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/instance/datasources_gen"
+ sqlserverflexbetaResGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/instance/resources_gen"
+)
+
+func mapResponseToModel(
+ ctx context.Context,
+ resp *sqlserverflexbeta.GetInstanceResponse,
+ m *sqlserverflexbetaResGen.InstanceModel,
+ tfDiags diag.Diagnostics,
+) error {
+ m.BackupSchedule = types.StringValue(resp.GetBackupSchedule())
+ m.Edition = types.StringValue(string(resp.GetEdition()))
+ m.Encryption = handleEncryption(m, resp)
+ m.FlavorId = types.StringValue(resp.GetFlavorId())
+ m.Id = types.StringValue(resp.GetId())
+ m.InstanceId = types.StringValue(resp.GetId())
+ m.IsDeletable = types.BoolValue(resp.GetIsDeletable())
+ m.Name = types.StringValue(resp.GetName())
+ netAcl, diags := types.ListValueFrom(ctx, types.StringType, resp.Network.GetAcl())
+ tfDiags.Append(diags...)
+ if diags.HasError() {
+ return fmt.Errorf(
+ "error converting network acl response value",
+ )
+ }
+ net, diags := sqlserverflexbetaResGen.NewNetworkValue(
+ sqlserverflexbetaResGen.NetworkValue{}.AttributeTypes(ctx),
+ map[string]attr.Value{
+ "access_scope": types.StringValue(string(resp.Network.GetAccessScope())),
+ "acl": netAcl,
+ "instance_address": types.StringValue(resp.Network.GetInstanceAddress()),
+ "router_address": types.StringValue(resp.Network.GetRouterAddress()),
+ },
+ )
+ tfDiags.Append(diags...)
+ if diags.HasError() {
+ return fmt.Errorf(
+ "error converting network response value",
+ "access_scope",
+ types.StringValue(string(resp.Network.GetAccessScope())),
+ "acl",
+ netAcl,
+ "instance_address",
+ types.StringValue(resp.Network.GetInstanceAddress()),
+ "router_address",
+ types.StringValue(resp.Network.GetRouterAddress()),
+ )
+ }
+ m.Network = net
+ m.Replicas = types.Int64Value(int64(resp.GetReplicas()))
+ m.RetentionDays = types.Int64Value(resp.GetRetentionDays())
+ m.Status = types.StringValue(string(resp.GetStatus()))
+
+ stor, diags := sqlserverflexbetaResGen.NewStorageValue(
+ sqlserverflexbetaResGen.StorageValue{}.AttributeTypes(ctx),
+ map[string]attr.Value{
+ "class": types.StringValue(resp.Storage.GetClass()),
+ "size": types.Int64Value(resp.Storage.GetSize()),
+ },
+ )
+ tfDiags.Append(diags...)
+ if diags.HasError() {
+ return fmt.Errorf("error converting storage response value")
+ }
+ m.Storage = stor
+
+ m.Version = types.StringValue(string(resp.GetVersion()))
+ return nil
+}
+
+func mapDataResponseToModel(
+ ctx context.Context,
+ resp *sqlserverflexbeta.GetInstanceResponse,
+ m *sqlserverflexbetaDataGen.InstanceModel,
+ tfDiags diag.Diagnostics,
+) error {
+ m.BackupSchedule = types.StringValue(resp.GetBackupSchedule())
+ m.Edition = types.StringValue(string(resp.GetEdition()))
+ m.Encryption = handleDSEncryption(m, resp)
+ m.FlavorId = types.StringValue(resp.GetFlavorId())
+ m.Id = types.StringValue(resp.GetId())
+ m.InstanceId = types.StringValue(resp.GetId())
+ m.IsDeletable = types.BoolValue(resp.GetIsDeletable())
+ m.Name = types.StringValue(resp.GetName())
+ netAcl, diags := types.ListValueFrom(ctx, types.StringType, resp.Network.GetAcl())
+ tfDiags.Append(diags...)
+ if diags.HasError() {
+ return fmt.Errorf(
+ "error converting network acl response value",
+ )
+ }
+ net, diags := sqlserverflexbetaDataGen.NewNetworkValue(
+ sqlserverflexbetaDataGen.NetworkValue{}.AttributeTypes(ctx),
+ map[string]attr.Value{
+ "access_scope": types.StringValue(string(resp.Network.GetAccessScope())),
+ "acl": netAcl,
+ "instance_address": types.StringValue(resp.Network.GetInstanceAddress()),
+ "router_address": types.StringValue(resp.Network.GetRouterAddress()),
+ },
+ )
+ tfDiags.Append(diags...)
+ if diags.HasError() {
+ return fmt.Errorf(
+ "error converting network response value",
+ "access_scope",
+ types.StringValue(string(resp.Network.GetAccessScope())),
+ "acl",
+ netAcl,
+ "instance_address",
+ types.StringValue(resp.Network.GetInstanceAddress()),
+ "router_address",
+ types.StringValue(resp.Network.GetRouterAddress()),
+ )
+ }
+ m.Network = net
+ m.Replicas = types.Int64Value(int64(resp.GetReplicas()))
+ m.RetentionDays = types.Int64Value(resp.GetRetentionDays())
+ m.Status = types.StringValue(string(resp.GetStatus()))
+
+ stor, diags := sqlserverflexbetaDataGen.NewStorageValue(
+ sqlserverflexbetaDataGen.StorageValue{}.AttributeTypes(ctx),
+ map[string]attr.Value{
+ "class": types.StringValue(resp.Storage.GetClass()),
+ "size": types.Int64Value(resp.Storage.GetSize()),
+ },
+ )
+ tfDiags.Append(diags...)
+ if diags.HasError() {
+ return fmt.Errorf("error converting storage response value")
+ }
+ m.Storage = stor
+
+ m.Version = types.StringValue(string(resp.GetVersion()))
+ return nil
+}
+
+func handleEncryption(
+ m *sqlserverflexbetaResGen.InstanceModel,
+ resp *sqlserverflexbeta.GetInstanceResponse,
+) sqlserverflexbetaResGen.EncryptionValue {
+ if !resp.HasEncryption() ||
+ resp.Encryption == nil ||
+ resp.Encryption.KekKeyId == nil ||
+ resp.Encryption.KekKeyRingId == nil ||
+ resp.Encryption.KekKeyVersion == nil ||
+ resp.Encryption.ServiceAccount == nil {
+
+ if m.Encryption.IsNull() || m.Encryption.IsUnknown() {
+ return sqlserverflexbetaResGen.NewEncryptionValueNull()
+ }
+ return m.Encryption
+ }
+
+ enc := sqlserverflexbetaResGen.NewEncryptionValueNull()
+ if kVal, ok := resp.Encryption.GetKekKeyIdOk(); ok {
+ enc.KekKeyId = types.StringValue(kVal)
+ }
+ if kkVal, ok := resp.Encryption.GetKekKeyRingIdOk(); ok {
+ enc.KekKeyRingId = types.StringValue(kkVal)
+ }
+ if kkvVal, ok := resp.Encryption.GetKekKeyVersionOk(); ok {
+ enc.KekKeyVersion = types.StringValue(kkvVal)
+ }
+ if sa, ok := resp.Encryption.GetServiceAccountOk(); ok {
+ enc.ServiceAccount = types.StringValue(sa)
+ }
+ return enc
+}
+
+func handleDSEncryption(
+ m *sqlserverflexbetaDataGen.InstanceModel,
+ resp *sqlserverflexbeta.GetInstanceResponse,
+) sqlserverflexbetaDataGen.EncryptionValue {
+ if !resp.HasEncryption() ||
+ resp.Encryption == nil ||
+ resp.Encryption.KekKeyId == nil ||
+ resp.Encryption.KekKeyRingId == nil ||
+ resp.Encryption.KekKeyVersion == nil ||
+ resp.Encryption.ServiceAccount == nil {
+
+ if m.Encryption.IsNull() || m.Encryption.IsUnknown() {
+ return sqlserverflexbetaDataGen.NewEncryptionValueNull()
+ }
+ return m.Encryption
+ }
+
+ enc := sqlserverflexbetaDataGen.NewEncryptionValueNull()
+ if kVal, ok := resp.Encryption.GetKekKeyIdOk(); ok {
+ enc.KekKeyId = types.StringValue(kVal)
+ }
+ if kkVal, ok := resp.Encryption.GetKekKeyRingIdOk(); ok {
+ enc.KekKeyRingId = types.StringValue(kkVal)
+ }
+ if kkvVal, ok := resp.Encryption.GetKekKeyVersionOk(); ok {
+ enc.KekKeyVersion = types.StringValue(kkvVal)
+ }
+ if sa, ok := resp.Encryption.GetServiceAccountOk(); ok {
+ enc.ServiceAccount = types.StringValue(sa)
+ }
+ return enc
+}
+
+func toCreatePayload(
+ ctx context.Context,
+ model *sqlserverflexbetaResGen.InstanceModel,
+) (*sqlserverflexbeta.CreateInstanceRequestPayload, error) {
+ if model == nil {
+ return nil, fmt.Errorf("nil model")
+ }
+
+ storagePayload := &sqlserverflexbeta.CreateInstanceRequestPayloadGetStorageArgType{}
+ if !model.Storage.IsNull() && !model.Storage.IsUnknown() {
+ storagePayload.Class = model.Storage.Class.ValueStringPointer()
+ storagePayload.Size = model.Storage.Size.ValueInt64Pointer()
+ }
+
+ var encryptionPayload *sqlserverflexbeta.CreateInstanceRequestPayloadGetEncryptionArgType = nil
+ if !model.Encryption.IsNull() && !model.Encryption.IsUnknown() &&
+ !model.Encryption.KekKeyId.IsNull() && model.Encryption.KekKeyId.IsUnknown() && model.Encryption.KekKeyId.ValueString() != "" &&
+ !model.Encryption.KekKeyRingId.IsNull() && !model.Encryption.KekKeyRingId.IsUnknown() && model.Encryption.KekKeyRingId.ValueString() != "" &&
+ !model.Encryption.KekKeyVersion.IsNull() && !model.Encryption.KekKeyVersion.IsUnknown() && model.Encryption.KekKeyVersion.ValueString() != "" &&
+ !model.Encryption.ServiceAccount.IsNull() && !model.Encryption.ServiceAccount.IsUnknown() && model.Encryption.ServiceAccount.ValueString() != "" {
+ encryptionPayload = &sqlserverflexbeta.CreateInstanceRequestPayloadGetEncryptionArgType{
+ KekKeyId: model.Encryption.KekKeyId.ValueStringPointer(),
+ KekKeyRingId: model.Encryption.KekKeyVersion.ValueStringPointer(),
+ KekKeyVersion: model.Encryption.KekKeyRingId.ValueStringPointer(),
+ ServiceAccount: model.Encryption.ServiceAccount.ValueStringPointer(),
+ }
+ }
+
+ networkPayload := &sqlserverflexbeta.CreateInstanceRequestPayloadGetNetworkArgType{}
+ if !model.Network.IsNull() && !model.Network.IsUnknown() {
+ networkPayload.AccessScope = sqlserverflexbeta.CreateInstanceRequestPayloadNetworkGetAccessScopeAttributeType(
+ model.Network.AccessScope.ValueStringPointer(),
+ )
+
+ var resList []string
+ diags := model.Network.Acl.ElementsAs(ctx, &resList, false)
+ if diags.HasError() {
+ return nil, fmt.Errorf("error converting network acl list")
+ }
+ networkPayload.Acl = &resList
+ }
+
+ return &sqlserverflexbeta.CreateInstanceRequestPayload{
+ BackupSchedule: conversion.StringValueToPointer(model.BackupSchedule),
+ Encryption: encryptionPayload,
+ FlavorId: conversion.StringValueToPointer(model.FlavorId),
+ Name: conversion.StringValueToPointer(model.Name),
+ Network: networkPayload,
+ RetentionDays: conversion.Int64ValueToPointer(model.RetentionDays),
+ Storage: storagePayload,
+ Version: sqlserverflexbeta.CreateInstanceRequestPayloadGetVersionAttributeType(
+ conversion.StringValueToPointer(model.Version),
+ ),
+ }, nil
+
+}
+
+func toUpdatePayload(
+ ctx context.Context,
+ m *sqlserverflexbetaResGen.InstanceModel,
+ resp *resource.UpdateResponse,
+) (*sqlserverflexbeta.UpdateInstanceRequestPayload, error) {
+ if m == nil {
+ return nil, fmt.Errorf("nil model")
+ }
+ if m.Replicas.ValueInt64() > math.MaxUint32 {
+ return nil, fmt.Errorf("replicas value is too big for uint32")
+ }
+ replVal := sqlserverflexbeta.Replicas(uint32(m.Replicas.ValueInt64()))
+
+ var netAcl []string
+ diags := m.Network.Acl.ElementsAs(ctx, &netAcl, false)
+ resp.Diagnostics.Append(diags...)
+ if diags.HasError() {
+ return nil, fmt.Errorf("error converting model network acl value")
+ }
+ return &sqlserverflexbeta.UpdateInstanceRequestPayload{
+ BackupSchedule: m.BackupSchedule.ValueStringPointer(),
+ FlavorId: m.FlavorId.ValueStringPointer(),
+ Name: m.Name.ValueStringPointer(),
+ Network: &sqlserverflexbeta.UpdateInstanceRequestPayloadNetwork{
+ Acl: &netAcl,
+ },
+ Replicas: &replVal,
+ RetentionDays: m.RetentionDays.ValueInt64Pointer(),
+ Storage: &sqlserverflexbeta.StorageUpdate{Size: m.Storage.Size.ValueInt64Pointer()},
+ Version: sqlserverflexbeta.UpdateInstanceRequestPayloadGetVersionAttributeType(
+ m.Version.ValueStringPointer(),
+ ),
+ }, nil
+}
diff --git a/stackit/internal/services/sqlserverflexbeta/instance/planModifiers.yaml b/stackit/internal/services/sqlserverflexbeta/instance/planModifiers.yaml
new file mode 100644
index 00000000..71d4cbe4
--- /dev/null
+++ b/stackit/internal/services/sqlserverflexbeta/instance/planModifiers.yaml
@@ -0,0 +1,124 @@
+fields:
+ - name: 'id'
+ modifiers:
+ - 'UseStateForUnknown'
+
+ - name: 'instance_id'
+ validators:
+ - validate.NoSeparator
+ - validate.UUID
+ modifiers:
+ - 'UseStateForUnknown'
+
+ - name: 'project_id'
+ validators:
+ - validate.NoSeparator
+ - validate.UUID
+ modifiers:
+ - 'UseStateForUnknown'
+ - 'RequiresReplace'
+
+ - name: 'name'
+ modifiers:
+ - 'UseStateForUnknown'
+
+ - name: 'backup_schedule'
+ modifiers:
+ - 'UseStateForUnknown'
+
+ - name: 'encryption.kek_key_id'
+ validators:
+ - validate.NoSeparator
+ modifiers:
+ - 'UseStateForUnknown'
+ - 'RequiresReplace'
+
+ - name: 'encryption.kek_key_version'
+ validators:
+ - validate.NoSeparator
+ modifiers:
+ - 'UseStateForUnknown'
+ - 'RequiresReplace'
+
+ - name: 'encryption.kek_key_ring_id'
+ validators:
+ - validate.NoSeparator
+ modifiers:
+ - 'UseStateForUnknown'
+ - 'RequiresReplace'
+
+ - name: 'encryption.service_account'
+ validators:
+ - validate.NoSeparator
+ modifiers:
+ - 'UseStateForUnknown'
+ - 'RequiresReplace'
+
+ - name: 'network.access_scope'
+ validators:
+ - validate.NoSeparator
+ modifiers:
+ - 'UseStateForUnknown'
+ - 'RequiresReplace'
+
+ - name: 'network.acl'
+ modifiers:
+ - 'UseStateForUnknown'
+
+ - name: 'network.instance_address'
+ modifiers:
+ - 'UseStateForUnknown'
+
+ - name: 'network.router_address'
+ modifiers:
+ - 'UseStateForUnknown'
+
+ - name: 'status'
+ modifiers:
+ - 'UseStateForUnknown'
+
+ - name: 'region'
+ modifiers:
+ - 'UseStateForUnknown'
+ - 'RequiresReplace'
+
+ - name: 'retention_days'
+ modifiers:
+ - 'UseStateForUnknown'
+
+ - name: 'edition'
+ modifiers:
+ - 'UseStateForUnknown'
+ - 'RequiresReplace'
+
+ - name: 'version'
+ modifiers:
+ - 'UseStateForUnknown'
+ - 'RequiresReplace'
+
+ - name: 'replicas'
+ modifiers:
+ - 'UseStateForUnknown'
+ - 'RequiresReplace'
+
+ - name: 'storage'
+ modifiers:
+ - 'UseStateForUnknown'
+
+ - name: 'storage.class'
+ modifiers:
+ - 'UseStateForUnknown'
+ - 'RequiresReplace'
+
+ - name: 'storage.size'
+ modifiers:
+ - 'UseStateForUnknown'
+
+ - name: 'flavor_id'
+ modifiers:
+ - 'UseStateForUnknown'
+ - 'RequiresReplace'
+
+ - name: 'is_deletable'
+ modifiers:
+ - 'UseStateForUnknown'
diff --git a/stackit/internal/services/sqlserverflexbeta/instance/resource.go b/stackit/internal/services/sqlserverflexbeta/instance/resource.go
new file mode 100644
index 00000000..a6325169
--- /dev/null
+++ b/stackit/internal/services/sqlserverflexbeta/instance/resource.go
@@ -0,0 +1,521 @@
+package sqlserverflexbeta
+
+import (
+ "context"
+ _ "embed"
+ "fmt"
+ "net/http"
+ "strings"
+ "time"
+
+ "github.com/hashicorp/terraform-plugin-framework/path"
+ "github.com/hashicorp/terraform-plugin-framework/resource"
+ "github.com/hashicorp/terraform-plugin-framework/resource/identityschema"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+ "github.com/hashicorp/terraform-plugin-log/tflog"
+ "github.com/stackitcloud/stackit-sdk-go/core/config"
+ "github.com/stackitcloud/stackit-sdk-go/core/oapierror"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
+ wait "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/wait/sqlserverflexbeta"
+
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta"
+
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
+
+ sqlserverflexbetaResGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/instance/resources_gen"
+)
+
+var (
+ _ resource.Resource = &instanceResource{}
+ _ resource.ResourceWithConfigure = &instanceResource{}
+ _ resource.ResourceWithImportState = &instanceResource{}
+ _ resource.ResourceWithModifyPlan = &instanceResource{}
+ _ resource.ResourceWithIdentity = &instanceResource{}
+)
+
+func NewInstanceResource() resource.Resource {
+ return &instanceResource{}
+}
+
+type instanceResource struct {
+ client *sqlserverflexbeta.APIClient
+ providerData core.ProviderData
+}
+
+type InstanceResourceIdentityModel struct {
+ ProjectID types.String `tfsdk:"project_id"`
+ Region types.String `tfsdk:"region"`
+ InstanceID types.String `tfsdk:"instance_id"`
+}
+
+func (r *instanceResource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
+ resp.TypeName = req.ProviderTypeName + "_sqlserverflexbeta_instance"
+}
+
+func (r *instanceResource) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) {
+ resp.Schema = sqlserverflexbetaResGen.InstanceResourceSchema(ctx)
+}
+
+func (r *instanceResource) IdentitySchema(_ context.Context, _ resource.IdentitySchemaRequest, resp *resource.IdentitySchemaResponse) {
+ resp.IdentitySchema = identityschema.Schema{
+ Attributes: map[string]identityschema.Attribute{
+ "project_id": identityschema.StringAttribute{
+ RequiredForImport: true, // must be set during import by the practitioner
+ },
+ "region": identityschema.StringAttribute{
+ RequiredForImport: true, // can be defaulted by the provider configuration
+ },
+ "instance_id": identityschema.StringAttribute{
+ RequiredForImport: true, // can be defaulted by the provider configuration
+ },
+ },
+ }
+}
+
+// Configure adds the provider configured client to the resource.
+func (r *instanceResource) Configure(
+ ctx context.Context,
+ req resource.ConfigureRequest,
+ resp *resource.ConfigureResponse,
+) {
+ var ok bool
+ r.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics)
+ if !ok {
+ return
+ }
+
+ apiClientConfigOptions := []config.ConfigurationOption{
+ config.WithCustomAuth(r.providerData.RoundTripper),
+ utils.UserAgentConfigOption(r.providerData.Version),
+ }
+ if r.providerData.SQLServerFlexCustomEndpoint != "" {
+ apiClientConfigOptions = append(apiClientConfigOptions, config.WithEndpoint(r.providerData.SQLServerFlexCustomEndpoint))
+ } else {
+ apiClientConfigOptions = append(apiClientConfigOptions, config.WithRegion(r.providerData.GetRegion()))
+ }
+ apiClient, err := sqlserverflexbeta.NewAPIClient(apiClientConfigOptions...)
+ if err != nil {
+ resp.Diagnostics.AddError(
+ "Error configuring API client",
+ fmt.Sprintf(
+ "Configuring client: %v. This is an error related to the provider configuration, not to the resource configuration",
+ err,
+ ),
+ )
+ return
+ }
+ r.client = apiClient
+ tflog.Info(ctx, "sqlserverflexbeta.Instance client configured")
+}
+
+// ModifyPlan implements resource.ResourceWithModifyPlan.
+// Use the modifier to set the effective region in the current plan.
+func (r *instanceResource) ModifyPlan(
+ ctx context.Context,
+ req resource.ModifyPlanRequest,
+ resp *resource.ModifyPlanResponse,
+) { // nolint:gocritic // function signature required by Terraform
+
+ // skip initial empty configuration to avoid follow-up errors
+ if req.Config.Raw.IsNull() {
+ return
+ }
+ var configModel sqlserverflexbetaResGen.InstanceModel
+ resp.Diagnostics.Append(req.Config.Get(ctx, &configModel)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ if req.Plan.Raw.IsNull() {
+ return
+ }
+ var planModel sqlserverflexbetaResGen.InstanceModel
+ resp.Diagnostics.Append(req.Plan.Get(ctx, &planModel)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ utils.AdaptRegion(ctx, configModel.Region, &planModel.Region, r.providerData.GetRegion(), resp)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ var identityModel InstanceResourceIdentityModel
+ identityModel.ProjectID = planModel.ProjectId
+ identityModel.Region = planModel.Region
+ if !planModel.InstanceId.IsNull() && !planModel.InstanceId.IsUnknown() {
+ identityModel.InstanceID = planModel.InstanceId
+ }
+
+ resp.Diagnostics.Append(resp.Identity.Set(ctx, identityModel)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ resp.Diagnostics.Append(resp.Plan.Set(ctx, planModel)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+}
+
+//go:embed planModifiers.yaml
+var modifiersFileByte []byte
+
+func (r *instanceResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) {
+ var data sqlserverflexbetaResGen.InstanceModel
+
+ // Read Terraform plan data into the model
+ resp.Diagnostics.Append(req.Plan.Get(ctx, &data)...)
+
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ // Read identity data
+ var identityData InstanceResourceIdentityModel
+ resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ ctx = core.InitProviderContext(ctx)
+
+ projectId := identityData.ProjectID.ValueString()
+ region := identityData.Region.ValueString()
+ ctx = tflog.SetField(ctx, "project_id", projectId)
+ ctx = tflog.SetField(ctx, "region", region)
+
+ // Generate API request body from model
+ payload, err := toCreatePayload(ctx, &data)
+ if err != nil {
+ core.LogAndAddError(
+ ctx,
+ &resp.Diagnostics,
+ "Error creating Instance",
+ fmt.Sprintf("Creating API payload: %v", err),
+ )
+ return
+ }
+ // Create new Instance
+ createResp, err := r.client.CreateInstanceRequest(
+ ctx,
+ projectId,
+ region,
+ ).CreateInstanceRequestPayload(*payload).Execute()
+ if err != nil {
+ core.LogAndAddError(ctx, &resp.Diagnostics, "Error creating Instance", fmt.Sprintf("Calling API: %v", err))
+ return
+ }
+
+ ctx = core.LogResponse(ctx)
+
+ InstanceId := *createResp.Id
+
+ // Example data value setting
+ data.InstanceId = types.StringValue("id-from-response")
+
+ identity := InstanceResourceIdentityModel{
+ ProjectID: types.StringValue(projectId),
+ Region: types.StringValue(region),
+ InstanceID: types.StringValue(InstanceId),
+ }
+ resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ waitResp, err := wait.CreateInstanceWaitHandler(
+ ctx,
+ r.client,
+ projectId,
+ InstanceId,
+ region,
+ ).SetSleepBeforeWait(
+ 30 * time.Second,
+ ).SetTimeout(
+ 90 * time.Minute,
+ ).WaitWithContext(ctx)
+ if err != nil {
+ core.LogAndAddError(
+ ctx,
+ &resp.Diagnostics,
+ "Error creating Instance",
+ fmt.Sprintf("Instance creation waiting: %v", err),
+ )
+ return
+ }
+
+ if waitResp.Id == nil {
+ core.LogAndAddError(
+ ctx,
+ &resp.Diagnostics,
+ "Error creating Instance",
+ "Instance creation waiting: returned id is nil",
+ )
+ return
+ }
+
+ // Map response body to schema
+ err = mapResponseToModel(ctx, waitResp, &data, resp.Diagnostics)
+ if err != nil {
+ core.LogAndAddError(
+ ctx,
+ &resp.Diagnostics,
+ "Error creating Instance",
+ fmt.Sprintf("Processing API payload: %v", err),
+ )
+ return
+ }
+
+ // Save data into Terraform state
+ resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
+
+ tflog.Info(ctx, "sqlserverflexbeta.Instance created")
+}
+
+func (r *instanceResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) {
+ var data sqlserverflexbetaResGen.InstanceModel
+
+ // Read Terraform prior state data into the model
+ resp.Diagnostics.Append(req.State.Get(ctx, &data)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ // Read identity data
+ var identityData InstanceResourceIdentityModel
+ resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ ctx = core.InitProviderContext(ctx)
+
+ projectId := identityData.ProjectID.ValueString()
+ region := identityData.Region.ValueString()
+ ctx = tflog.SetField(ctx, "project_id", projectId)
+ ctx = tflog.SetField(ctx, "region", region)
+
+ instanceId := data.InstanceId.ValueString()
+ ctx = tflog.SetField(ctx, "instance_id", instanceId)
+
+ instanceResp, err := r.client.GetInstanceRequest(ctx, projectId, region, instanceId).Execute()
+ if err != nil {
+ oapiErr, ok := err.(*oapierror.GenericOpenAPIError) //nolint:errorlint //complaining that error.As should be used to catch wrapped errors, but this error should not be wrapped
+ if ok && oapiErr.StatusCode == http.StatusNotFound {
+ resp.State.RemoveResource(ctx)
+ return
+ }
+ core.LogAndAddError(ctx, &resp.Diagnostics, "Error reading instance", err.Error())
+ return
+ }
+
+ ctx = core.LogResponse(ctx)
+
+ // Map response body to schema
+ err = mapResponseToModel(ctx, instanceResp, &data, resp.Diagnostics)
+ if err != nil {
+ core.LogAndAddError(
+ ctx,
+ &resp.Diagnostics,
+ "Error reading instance",
+ fmt.Sprintf("Processing API payload: %v", err),
+ )
+ return
+ }
+
+ // Save updated data into Terraform state
+ resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
+
+ identity := InstanceResourceIdentityModel{
+ ProjectID: types.StringValue(projectId),
+ Region: types.StringValue(region),
+ InstanceID: types.StringValue(instanceId),
+ }
+ resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ tflog.Info(ctx, "sqlserverflexbeta.Instance read")
+}
+
+func (r *instanceResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) {
+ var data sqlserverflexbetaResGen.InstanceModel
+ updateInstanceError := "Error updating instance"
+
+ // Read Terraform prior state data into the model
+ resp.Diagnostics.Append(req.State.Get(ctx, &data)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ // Read identity data
+ var identityData InstanceResourceIdentityModel
+ resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ ctx = core.InitProviderContext(ctx)
+
+ projectId := identityData.ProjectID.ValueString()
+ region := identityData.Region.ValueString()
+ ctx = tflog.SetField(ctx, "project_id", projectId)
+ ctx = tflog.SetField(ctx, "region", region)
+
+ instanceId := data.InstanceId.ValueString()
+ ctx = tflog.SetField(ctx, "instance_id", instanceId)
+
+ // Generate API request body from model
+ payload, err := toUpdatePayload(ctx, &data, resp)
+ if err != nil {
+ core.LogAndAddError(
+ ctx,
+ &resp.Diagnostics,
+ updateInstanceError,
+ fmt.Sprintf("Creating API payload: %v", err),
+ )
+ return
+ }
+ // Update existing instance
+ err = r.client.UpdateInstanceRequest(
+ ctx,
+ projectId,
+ region,
+ instanceId,
+ ).UpdateInstanceRequestPayload(*payload).Execute()
+ if err != nil {
+ core.LogAndAddError(ctx, &resp.Diagnostics, updateInstanceError, err.Error())
+ return
+ }
+
+ ctx = core.LogResponse(ctx)
+
+ waitResp, err := wait.UpdateInstanceWaitHandler(ctx, r.client, projectId, instanceId, region).WaitWithContext(ctx)
+ if err != nil {
+ core.LogAndAddError(
+ ctx,
+ &resp.Diagnostics,
+ updateInstanceError,
+ fmt.Sprintf("Instance update waiting: %v", err),
+ )
+ return
+ }
+
+ // Map response body to schema
+ err = mapResponseToModel(ctx, waitResp, &data, resp.Diagnostics)
+ if err != nil {
+ core.LogAndAddError(
+ ctx,
+ &resp.Diagnostics,
+ updateInstanceError,
+ fmt.Sprintf("Processing API payload: %v", err),
+ )
+ return
+ }
+
+ // Save updated data into Terraform state
+ resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
+
+ tflog.Info(ctx, "sqlserverflexbeta.Instance updated")
+}
+
+func (r *instanceResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) {
+ var data sqlserverflexbetaResGen.InstanceModel
+
+ // Read Terraform prior state data into the model
+ resp.Diagnostics.Append(req.State.Get(ctx, &data)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ // Read identity data
+ var identityData InstanceResourceIdentityModel
+ resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ ctx = core.InitProviderContext(ctx)
+
+ projectId := identityData.ProjectID.ValueString()
+ region := identityData.Region.ValueString()
+ ctx = tflog.SetField(ctx, "project_id", projectId)
+ ctx = tflog.SetField(ctx, "region", region)
+
+ instanceId := identityData.InstanceID.ValueString()
+ ctx = tflog.SetField(ctx, "instance_id", instanceId)
+
+ // Delete existing instance
+ err := r.client.DeleteInstanceRequest(ctx, projectId, region, instanceId).Execute()
+ if err != nil {
+ core.LogAndAddError(ctx, &resp.Diagnostics, "Error deleting instance", fmt.Sprintf("Calling API: %v", err))
+ return
+ }
+
+ ctx = core.LogResponse(ctx)
+
+ _, err = wait.DeleteInstanceWaitHandler(ctx, r.client, projectId, instanceId, region).WaitWithContext(ctx)
+ if err != nil {
+ core.LogAndAddError(
+ ctx,
+ &resp.Diagnostics,
+ "Error deleting instance",
+ fmt.Sprintf("Instance deletion waiting: %v", err),
+ )
+ return
+ }
+
+ tflog.Info(ctx, "sqlserverflexbeta.Instance deleted")
+}
+
+// ImportState imports a resource into the Terraform state on success.
+// The expected format of the resource import identifier is: project_id,zone_id,record_set_id
+func (r *instanceResource) ImportState(
+ ctx context.Context,
+ req resource.ImportStateRequest,
+ resp *resource.ImportStateResponse,
+) {
+ ctx = core.InitProviderContext(ctx)
+
+ if req.ID != "" {
+ idParts := strings.Split(req.ID, core.Separator)
+
+ if len(idParts) != 3 || idParts[0] == "" || idParts[1] == "" || idParts[2] == "" {
+ core.LogAndAddError(ctx, &resp.Diagnostics,
+ "Error importing instance",
+ fmt.Sprintf("Expected import identifier with format: [project_id],[region],[instance_id] Got: %q", req.ID),
+ )
+ return
+ }
+
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), idParts[0])...)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), idParts[1])...)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), idParts[2])...)
+ return
+ }
+
+ var identityData InstanceResourceIdentityModel
+ resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ resp.Diagnostics.Append(
+ resp.State.SetAttribute(
+ ctx,
+ path.Root("id"),
+ utils.BuildInternalTerraformId(
+ identityData.ProjectID.ValueString(),
+ identityData.Region.ValueString(),
+ identityData.InstanceID.ValueString(),
+ ),
+ )...)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), identityData.ProjectID.ValueString())...)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), identityData.Region.ValueString())...)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), identityData.InstanceID.ValueString())...)
+
+ tflog.Info(ctx, "Sqlserverflexbeta instance state imported")
+}
diff --git a/stackit/internal/services/sqlserverflexbeta/instance/resources_gen/instance_resource_gen.go b/stackit/internal/services/sqlserverflexbeta/instance/resources_gen/instance_resource_gen.go
new file mode 100644
index 00000000..f8865ae5
--- /dev/null
+++ b/stackit/internal/services/sqlserverflexbeta/instance/resources_gen/instance_resource_gen.go
@@ -0,0 +1,1597 @@
+// Code generated by terraform-plugin-framework-generator DO NOT EDIT.
+
+package sqlserverflexbeta
+
+import (
+ "context"
+ "fmt"
+ "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ "github.com/hashicorp/terraform-plugin-framework/attr"
+ "github.com/hashicorp/terraform-plugin-framework/diag"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringdefault"
+ "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+ "github.com/hashicorp/terraform-plugin-framework/types/basetypes"
+ "github.com/hashicorp/terraform-plugin-go/tftypes"
+ "strings"
+
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema"
+)
+
+func InstanceResourceSchema(ctx context.Context) schema.Schema {
+ return schema.Schema{
+ Attributes: map[string]schema.Attribute{
+ "backup_schedule": schema.StringAttribute{
+ Required: true,
+ Description: "The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule.",
+ MarkdownDescription: "The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule.",
+ },
+ "edition": schema.StringAttribute{
+ Computed: true,
+ Description: "Edition of the MSSQL server instance",
+ MarkdownDescription: "Edition of the MSSQL server instance",
+ },
+ "encryption": schema.SingleNestedAttribute{
+ Attributes: map[string]schema.Attribute{
+ "kek_key_id": schema.StringAttribute{
+ Required: true,
+ Description: "The key identifier",
+ MarkdownDescription: "The key identifier",
+ },
+ "kek_key_ring_id": schema.StringAttribute{
+ Required: true,
+ Description: "The keyring identifier",
+ MarkdownDescription: "The keyring identifier",
+ },
+ "kek_key_version": schema.StringAttribute{
+ Required: true,
+ Description: "The key version",
+ MarkdownDescription: "The key version",
+ },
+ "service_account": schema.StringAttribute{
+ Required: true,
+ },
+ },
+ CustomType: EncryptionType{
+ ObjectType: types.ObjectType{
+ AttrTypes: EncryptionValue{}.AttributeTypes(ctx),
+ },
+ },
+ Optional: true,
+ Computed: true,
+ Description: "this defines which key to use for storage encryption",
+ MarkdownDescription: "this defines which key to use for storage encryption",
+ },
+ "flavor_id": schema.StringAttribute{
+ Required: true,
+ Description: "The id of the instance flavor.",
+ MarkdownDescription: "The id of the instance flavor.",
+ },
+ "id": schema.StringAttribute{
+ Computed: true,
+ Description: "The ID of the instance.",
+ MarkdownDescription: "The ID of the instance.",
+ },
+ "instance_id": schema.StringAttribute{
+ Optional: true,
+ Computed: true,
+ Description: "The ID of the instance.",
+ MarkdownDescription: "The ID of the instance.",
+ },
+ "is_deletable": schema.BoolAttribute{
+ Computed: true,
+ Description: "Whether the instance can be deleted or not.",
+ MarkdownDescription: "Whether the instance can be deleted or not.",
+ },
+ "name": schema.StringAttribute{
+ Required: true,
+ Description: "The name of the instance.",
+ MarkdownDescription: "The name of the instance.",
+ },
+ "network": schema.SingleNestedAttribute{
+ Attributes: map[string]schema.Attribute{
+ "access_scope": schema.StringAttribute{
+ Optional: true,
+ Computed: true,
+ Description: "The network access scope of the instance\n\n⚠️ **Note:** This feature is in private preview. Supplying this object is only permitted for enabled accounts. If your account does not have access, the request will be rejected.\n",
+ MarkdownDescription: "The network access scope of the instance\n\n⚠️ **Note:** This feature is in private preview. Supplying this object is only permitted for enabled accounts. If your account does not have access, the request will be rejected.\n",
+ Validators: []validator.String{
+ stringvalidator.OneOf(
+ "PUBLIC",
+ "SNA",
+ ),
+ },
+ Default: stringdefault.StaticString("PUBLIC"),
+ },
+ "acl": schema.ListAttribute{
+ ElementType: types.StringType,
+ Required: true,
+ Description: "List of IPV4 cidr.",
+ MarkdownDescription: "List of IPV4 cidr.",
+ },
+ "instance_address": schema.StringAttribute{
+ Computed: true,
+ },
+ "router_address": schema.StringAttribute{
+ Computed: true,
+ },
+ },
+ CustomType: NetworkType{
+ ObjectType: types.ObjectType{
+ AttrTypes: NetworkValue{}.AttributeTypes(ctx),
+ },
+ },
+ Required: true,
+ Description: "the network configuration of the instance.",
+ MarkdownDescription: "the network configuration of the instance.",
+ },
+ "project_id": schema.StringAttribute{
+ Optional: true,
+ Computed: true,
+ Description: "The STACKIT project ID.",
+ MarkdownDescription: "The STACKIT project ID.",
+ },
+ "region": schema.StringAttribute{
+ Optional: true,
+ Computed: true,
+ Description: "The region which should be addressed",
+ MarkdownDescription: "The region which should be addressed",
+ Validators: []validator.String{
+ stringvalidator.OneOf(
+ "eu01",
+ ),
+ },
+ },
+ "replicas": schema.Int64Attribute{
+ Computed: true,
+ Description: "How many replicas the instance should have.",
+ MarkdownDescription: "How many replicas the instance should have.",
+ },
+ "retention_days": schema.Int64Attribute{
+ Required: true,
+ Description: "The days for how long the backup files should be stored before cleaned up. 30 to 365",
+ MarkdownDescription: "The days for how long the backup files should be stored before cleaned up. 30 to 365",
+ },
+ "status": schema.StringAttribute{
+ Computed: true,
+ },
+ "storage": schema.SingleNestedAttribute{
+ Attributes: map[string]schema.Attribute{
+ "class": schema.StringAttribute{
+ Required: true,
+ Description: "The storage class for the storage.",
+ MarkdownDescription: "The storage class for the storage.",
+ },
+ "size": schema.Int64Attribute{
+ Required: true,
+ Description: "The storage size in Gigabytes.",
+ MarkdownDescription: "The storage size in Gigabytes.",
+ },
+ },
+ CustomType: StorageType{
+ ObjectType: types.ObjectType{
+ AttrTypes: StorageValue{}.AttributeTypes(ctx),
+ },
+ },
+ Required: true,
+ Description: "The object containing information about the storage size and class.",
+ MarkdownDescription: "The object containing information about the storage size and class.",
+ },
+ "version": schema.StringAttribute{
+ Required: true,
+ Description: "The sqlserver version used for the instance.",
+ MarkdownDescription: "The sqlserver version used for the instance.",
+ Validators: []validator.String{
+ stringvalidator.OneOf(
+ "2022",
+ ),
+ },
+ },
+ },
+ }
+}
+
+type InstanceModel struct {
+ BackupSchedule types.String `tfsdk:"backup_schedule"`
+ Edition types.String `tfsdk:"edition"`
+ Encryption EncryptionValue `tfsdk:"encryption"`
+ FlavorId types.String `tfsdk:"flavor_id"`
+ Id types.String `tfsdk:"id"`
+ InstanceId types.String `tfsdk:"instance_id"`
+ IsDeletable types.Bool `tfsdk:"is_deletable"`
+ Name types.String `tfsdk:"name"`
+ Network NetworkValue `tfsdk:"network"`
+ ProjectId types.String `tfsdk:"project_id"`
+ Region types.String `tfsdk:"region"`
+ Replicas types.Int64 `tfsdk:"replicas"`
+ RetentionDays types.Int64 `tfsdk:"retention_days"`
+ Status types.String `tfsdk:"status"`
+ Storage StorageValue `tfsdk:"storage"`
+ Version types.String `tfsdk:"version"`
+}
+
+var _ basetypes.ObjectTypable = EncryptionType{}
+
+type EncryptionType struct {
+ basetypes.ObjectType
+}
+
+func (t EncryptionType) Equal(o attr.Type) bool {
+ other, ok := o.(EncryptionType)
+
+ if !ok {
+ return false
+ }
+
+ return t.ObjectType.Equal(other.ObjectType)
+}
+
+func (t EncryptionType) String() string {
+ return "EncryptionType"
+}
+
+func (t EncryptionType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ attributes := in.Attributes()
+
+ kekKeyIdAttribute, ok := attributes["kek_key_id"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `kek_key_id is missing from object`)
+
+ return nil, diags
+ }
+
+ kekKeyIdVal, ok := kekKeyIdAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`kek_key_id expected to be basetypes.StringValue, was: %T`, kekKeyIdAttribute))
+ }
+
+ kekKeyRingIdAttribute, ok := attributes["kek_key_ring_id"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `kek_key_ring_id is missing from object`)
+
+ return nil, diags
+ }
+
+ kekKeyRingIdVal, ok := kekKeyRingIdAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`kek_key_ring_id expected to be basetypes.StringValue, was: %T`, kekKeyRingIdAttribute))
+ }
+
+ kekKeyVersionAttribute, ok := attributes["kek_key_version"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `kek_key_version is missing from object`)
+
+ return nil, diags
+ }
+
+ kekKeyVersionVal, ok := kekKeyVersionAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`kek_key_version expected to be basetypes.StringValue, was: %T`, kekKeyVersionAttribute))
+ }
+
+ serviceAccountAttribute, ok := attributes["service_account"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `service_account is missing from object`)
+
+ return nil, diags
+ }
+
+ serviceAccountVal, ok := serviceAccountAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`service_account expected to be basetypes.StringValue, was: %T`, serviceAccountAttribute))
+ }
+
+ if diags.HasError() {
+ return nil, diags
+ }
+
+ return EncryptionValue{
+ KekKeyId: kekKeyIdVal,
+ KekKeyRingId: kekKeyRingIdVal,
+ KekKeyVersion: kekKeyVersionVal,
+ ServiceAccount: serviceAccountVal,
+ state: attr.ValueStateKnown,
+ }, diags
+}
+
+func NewEncryptionValueNull() EncryptionValue {
+ return EncryptionValue{
+ state: attr.ValueStateNull,
+ }
+}
+
+func NewEncryptionValueUnknown() EncryptionValue {
+ return EncryptionValue{
+ state: attr.ValueStateUnknown,
+ }
+}
+
+func NewEncryptionValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (EncryptionValue, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521
+ ctx := context.Background()
+
+ for name, attributeType := range attributeTypes {
+ attribute, ok := attributes[name]
+
+ if !ok {
+ diags.AddError(
+ "Missing EncryptionValue Attribute Value",
+ "While creating a EncryptionValue value, a missing attribute value was detected. "+
+ "A EncryptionValue must contain values for all attributes, even if null or unknown. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("EncryptionValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()),
+ )
+
+ continue
+ }
+
+ if !attributeType.Equal(attribute.Type(ctx)) {
+ diags.AddError(
+ "Invalid EncryptionValue Attribute Type",
+ "While creating a EncryptionValue value, an invalid attribute value was detected. "+
+ "A EncryptionValue must use a matching attribute type for the value. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("EncryptionValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+
+ fmt.Sprintf("EncryptionValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)),
+ )
+ }
+ }
+
+ for name := range attributes {
+ _, ok := attributeTypes[name]
+
+ if !ok {
+ diags.AddError(
+ "Extra EncryptionValue Attribute Value",
+ "While creating a EncryptionValue value, an extra attribute value was detected. "+
+ "A EncryptionValue must not contain values beyond the expected attribute types. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("Extra EncryptionValue Attribute Name: %s", name),
+ )
+ }
+ }
+
+ if diags.HasError() {
+ return NewEncryptionValueUnknown(), diags
+ }
+
+ kekKeyIdAttribute, ok := attributes["kek_key_id"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `kek_key_id is missing from object`)
+
+ return NewEncryptionValueUnknown(), diags
+ }
+
+ kekKeyIdVal, ok := kekKeyIdAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`kek_key_id expected to be basetypes.StringValue, was: %T`, kekKeyIdAttribute))
+ }
+
+ kekKeyRingIdAttribute, ok := attributes["kek_key_ring_id"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `kek_key_ring_id is missing from object`)
+
+ return NewEncryptionValueUnknown(), diags
+ }
+
+ kekKeyRingIdVal, ok := kekKeyRingIdAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`kek_key_ring_id expected to be basetypes.StringValue, was: %T`, kekKeyRingIdAttribute))
+ }
+
+ kekKeyVersionAttribute, ok := attributes["kek_key_version"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `kek_key_version is missing from object`)
+
+ return NewEncryptionValueUnknown(), diags
+ }
+
+ kekKeyVersionVal, ok := kekKeyVersionAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`kek_key_version expected to be basetypes.StringValue, was: %T`, kekKeyVersionAttribute))
+ }
+
+ serviceAccountAttribute, ok := attributes["service_account"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `service_account is missing from object`)
+
+ return NewEncryptionValueUnknown(), diags
+ }
+
+ serviceAccountVal, ok := serviceAccountAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`service_account expected to be basetypes.StringValue, was: %T`, serviceAccountAttribute))
+ }
+
+ if diags.HasError() {
+ return NewEncryptionValueUnknown(), diags
+ }
+
+ return EncryptionValue{
+ KekKeyId: kekKeyIdVal,
+ KekKeyRingId: kekKeyRingIdVal,
+ KekKeyVersion: kekKeyVersionVal,
+ ServiceAccount: serviceAccountVal,
+ state: attr.ValueStateKnown,
+ }, diags
+}
+
+func NewEncryptionValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) EncryptionValue {
+ object, diags := NewEncryptionValue(attributeTypes, attributes)
+
+ if diags.HasError() {
+ // This could potentially be added to the diag package.
+ diagsStrings := make([]string, 0, len(diags))
+
+ for _, diagnostic := range diags {
+ diagsStrings = append(diagsStrings, fmt.Sprintf(
+ "%s | %s | %s",
+ diagnostic.Severity(),
+ diagnostic.Summary(),
+ diagnostic.Detail()))
+ }
+
+ panic("NewEncryptionValueMust received error(s): " + strings.Join(diagsStrings, "\n"))
+ }
+
+ return object
+}
+
+func (t EncryptionType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) {
+ if in.Type() == nil {
+ return NewEncryptionValueNull(), nil
+ }
+
+ if !in.Type().Equal(t.TerraformType(ctx)) {
+ return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type())
+ }
+
+ if !in.IsKnown() {
+ return NewEncryptionValueUnknown(), nil
+ }
+
+ if in.IsNull() {
+ return NewEncryptionValueNull(), nil
+ }
+
+ attributes := map[string]attr.Value{}
+
+ val := map[string]tftypes.Value{}
+
+ err := in.As(&val)
+
+ if err != nil {
+ return nil, err
+ }
+
+ for k, v := range val {
+ a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v)
+
+ if err != nil {
+ return nil, err
+ }
+
+ attributes[k] = a
+ }
+
+ return NewEncryptionValueMust(EncryptionValue{}.AttributeTypes(ctx), attributes), nil
+}
+
+func (t EncryptionType) ValueType(ctx context.Context) attr.Value {
+ return EncryptionValue{}
+}
+
+var _ basetypes.ObjectValuable = EncryptionValue{}
+
+type EncryptionValue struct {
+ KekKeyId basetypes.StringValue `tfsdk:"kek_key_id"`
+ KekKeyRingId basetypes.StringValue `tfsdk:"kek_key_ring_id"`
+ KekKeyVersion basetypes.StringValue `tfsdk:"kek_key_version"`
+ ServiceAccount basetypes.StringValue `tfsdk:"service_account"`
+ state attr.ValueState
+}
+
+func (v EncryptionValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
+ attrTypes := make(map[string]tftypes.Type, 4)
+
+ var val tftypes.Value
+ var err error
+
+ attrTypes["kek_key_id"] = basetypes.StringType{}.TerraformType(ctx)
+ attrTypes["kek_key_ring_id"] = basetypes.StringType{}.TerraformType(ctx)
+ attrTypes["kek_key_version"] = basetypes.StringType{}.TerraformType(ctx)
+ attrTypes["service_account"] = basetypes.StringType{}.TerraformType(ctx)
+
+ objectType := tftypes.Object{AttributeTypes: attrTypes}
+
+ switch v.state {
+ case attr.ValueStateKnown:
+ vals := make(map[string]tftypes.Value, 4)
+
+ val, err = v.KekKeyId.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["kek_key_id"] = val
+
+ val, err = v.KekKeyRingId.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["kek_key_ring_id"] = val
+
+ val, err = v.KekKeyVersion.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["kek_key_version"] = val
+
+ val, err = v.ServiceAccount.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["service_account"] = val
+
+ if err := tftypes.ValidateValue(objectType, vals); err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ return tftypes.NewValue(objectType, vals), nil
+ case attr.ValueStateNull:
+ return tftypes.NewValue(objectType, nil), nil
+ case attr.ValueStateUnknown:
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), nil
+ default:
+ panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state))
+ }
+}
+
+func (v EncryptionValue) IsNull() bool {
+ return v.state == attr.ValueStateNull
+}
+
+func (v EncryptionValue) IsUnknown() bool {
+ return v.state == attr.ValueStateUnknown
+}
+
+func (v EncryptionValue) String() string {
+ return "EncryptionValue"
+}
+
+func (v EncryptionValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ attributeTypes := map[string]attr.Type{
+ "kek_key_id": basetypes.StringType{},
+ "kek_key_ring_id": basetypes.StringType{},
+ "kek_key_version": basetypes.StringType{},
+ "service_account": basetypes.StringType{},
+ }
+
+ if v.IsNull() {
+ return types.ObjectNull(attributeTypes), diags
+ }
+
+ if v.IsUnknown() {
+ return types.ObjectUnknown(attributeTypes), diags
+ }
+
+ objVal, diags := types.ObjectValue(
+ attributeTypes,
+ map[string]attr.Value{
+ "kek_key_id": v.KekKeyId,
+ "kek_key_ring_id": v.KekKeyRingId,
+ "kek_key_version": v.KekKeyVersion,
+ "service_account": v.ServiceAccount,
+ })
+
+ return objVal, diags
+}
+
+func (v EncryptionValue) Equal(o attr.Value) bool {
+ other, ok := o.(EncryptionValue)
+
+ if !ok {
+ return false
+ }
+
+ if v.state != other.state {
+ return false
+ }
+
+ if v.state != attr.ValueStateKnown {
+ return true
+ }
+
+ if !v.KekKeyId.Equal(other.KekKeyId) {
+ return false
+ }
+
+ if !v.KekKeyRingId.Equal(other.KekKeyRingId) {
+ return false
+ }
+
+ if !v.KekKeyVersion.Equal(other.KekKeyVersion) {
+ return false
+ }
+
+ if !v.ServiceAccount.Equal(other.ServiceAccount) {
+ return false
+ }
+
+ return true
+}
+
+func (v EncryptionValue) Type(ctx context.Context) attr.Type {
+ return EncryptionType{
+ basetypes.ObjectType{
+ AttrTypes: v.AttributeTypes(ctx),
+ },
+ }
+}
+
+func (v EncryptionValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
+ return map[string]attr.Type{
+ "kek_key_id": basetypes.StringType{},
+ "kek_key_ring_id": basetypes.StringType{},
+ "kek_key_version": basetypes.StringType{},
+ "service_account": basetypes.StringType{},
+ }
+}
+
+var _ basetypes.ObjectTypable = NetworkType{}
+
+type NetworkType struct {
+ basetypes.ObjectType
+}
+
+func (t NetworkType) Equal(o attr.Type) bool {
+ other, ok := o.(NetworkType)
+
+ if !ok {
+ return false
+ }
+
+ return t.ObjectType.Equal(other.ObjectType)
+}
+
+func (t NetworkType) String() string {
+ return "NetworkType"
+}
+
+func (t NetworkType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ attributes := in.Attributes()
+
+ accessScopeAttribute, ok := attributes["access_scope"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `access_scope is missing from object`)
+
+ return nil, diags
+ }
+
+ accessScopeVal, ok := accessScopeAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`access_scope expected to be basetypes.StringValue, was: %T`, accessScopeAttribute))
+ }
+
+ aclAttribute, ok := attributes["acl"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `acl is missing from object`)
+
+ return nil, diags
+ }
+
+ aclVal, ok := aclAttribute.(basetypes.ListValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`acl expected to be basetypes.ListValue, was: %T`, aclAttribute))
+ }
+
+ instanceAddressAttribute, ok := attributes["instance_address"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `instance_address is missing from object`)
+
+ return nil, diags
+ }
+
+ instanceAddressVal, ok := instanceAddressAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`instance_address expected to be basetypes.StringValue, was: %T`, instanceAddressAttribute))
+ }
+
+ routerAddressAttribute, ok := attributes["router_address"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `router_address is missing from object`)
+
+ return nil, diags
+ }
+
+ routerAddressVal, ok := routerAddressAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`router_address expected to be basetypes.StringValue, was: %T`, routerAddressAttribute))
+ }
+
+ if diags.HasError() {
+ return nil, diags
+ }
+
+ return NetworkValue{
+ AccessScope: accessScopeVal,
+ Acl: aclVal,
+ InstanceAddress: instanceAddressVal,
+ RouterAddress: routerAddressVal,
+ state: attr.ValueStateKnown,
+ }, diags
+}
+
+func NewNetworkValueNull() NetworkValue {
+ return NetworkValue{
+ state: attr.ValueStateNull,
+ }
+}
+
+func NewNetworkValueUnknown() NetworkValue {
+ return NetworkValue{
+ state: attr.ValueStateUnknown,
+ }
+}
+
+func NewNetworkValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (NetworkValue, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521
+ ctx := context.Background()
+
+ for name, attributeType := range attributeTypes {
+ attribute, ok := attributes[name]
+
+ if !ok {
+ diags.AddError(
+ "Missing NetworkValue Attribute Value",
+ "While creating a NetworkValue value, a missing attribute value was detected. "+
+ "A NetworkValue must contain values for all attributes, even if null or unknown. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("NetworkValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()),
+ )
+
+ continue
+ }
+
+ if !attributeType.Equal(attribute.Type(ctx)) {
+ diags.AddError(
+ "Invalid NetworkValue Attribute Type",
+ "While creating a NetworkValue value, an invalid attribute value was detected. "+
+ "A NetworkValue must use a matching attribute type for the value. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("NetworkValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+
+ fmt.Sprintf("NetworkValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)),
+ )
+ }
+ }
+
+ for name := range attributes {
+ _, ok := attributeTypes[name]
+
+ if !ok {
+ diags.AddError(
+ "Extra NetworkValue Attribute Value",
+ "While creating a NetworkValue value, an extra attribute value was detected. "+
+ "A NetworkValue must not contain values beyond the expected attribute types. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("Extra NetworkValue Attribute Name: %s", name),
+ )
+ }
+ }
+
+ if diags.HasError() {
+ return NewNetworkValueUnknown(), diags
+ }
+
+ accessScopeAttribute, ok := attributes["access_scope"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `access_scope is missing from object`)
+
+ return NewNetworkValueUnknown(), diags
+ }
+
+ accessScopeVal, ok := accessScopeAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`access_scope expected to be basetypes.StringValue, was: %T`, accessScopeAttribute))
+ }
+
+ aclAttribute, ok := attributes["acl"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `acl is missing from object`)
+
+ return NewNetworkValueUnknown(), diags
+ }
+
+ aclVal, ok := aclAttribute.(basetypes.ListValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`acl expected to be basetypes.ListValue, was: %T`, aclAttribute))
+ }
+
+ instanceAddressAttribute, ok := attributes["instance_address"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `instance_address is missing from object`)
+
+ return NewNetworkValueUnknown(), diags
+ }
+
+ instanceAddressVal, ok := instanceAddressAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`instance_address expected to be basetypes.StringValue, was: %T`, instanceAddressAttribute))
+ }
+
+ routerAddressAttribute, ok := attributes["router_address"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `router_address is missing from object`)
+
+ return NewNetworkValueUnknown(), diags
+ }
+
+ routerAddressVal, ok := routerAddressAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`router_address expected to be basetypes.StringValue, was: %T`, routerAddressAttribute))
+ }
+
+ if diags.HasError() {
+ return NewNetworkValueUnknown(), diags
+ }
+
+ return NetworkValue{
+ AccessScope: accessScopeVal,
+ Acl: aclVal,
+ InstanceAddress: instanceAddressVal,
+ RouterAddress: routerAddressVal,
+ state: attr.ValueStateKnown,
+ }, diags
+}
+
+func NewNetworkValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) NetworkValue {
+ object, diags := NewNetworkValue(attributeTypes, attributes)
+
+ if diags.HasError() {
+ // This could potentially be added to the diag package.
+ diagsStrings := make([]string, 0, len(diags))
+
+ for _, diagnostic := range diags {
+ diagsStrings = append(diagsStrings, fmt.Sprintf(
+ "%s | %s | %s",
+ diagnostic.Severity(),
+ diagnostic.Summary(),
+ diagnostic.Detail()))
+ }
+
+ panic("NewNetworkValueMust received error(s): " + strings.Join(diagsStrings, "\n"))
+ }
+
+ return object
+}
+
+func (t NetworkType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) {
+ if in.Type() == nil {
+ return NewNetworkValueNull(), nil
+ }
+
+ if !in.Type().Equal(t.TerraformType(ctx)) {
+ return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type())
+ }
+
+ if !in.IsKnown() {
+ return NewNetworkValueUnknown(), nil
+ }
+
+ if in.IsNull() {
+ return NewNetworkValueNull(), nil
+ }
+
+ attributes := map[string]attr.Value{}
+
+ val := map[string]tftypes.Value{}
+
+ err := in.As(&val)
+
+ if err != nil {
+ return nil, err
+ }
+
+ for k, v := range val {
+ a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v)
+
+ if err != nil {
+ return nil, err
+ }
+
+ attributes[k] = a
+ }
+
+ return NewNetworkValueMust(NetworkValue{}.AttributeTypes(ctx), attributes), nil
+}
+
+func (t NetworkType) ValueType(ctx context.Context) attr.Value {
+ return NetworkValue{}
+}
+
+var _ basetypes.ObjectValuable = NetworkValue{}
+
+type NetworkValue struct {
+ AccessScope basetypes.StringValue `tfsdk:"access_scope"`
+ Acl basetypes.ListValue `tfsdk:"acl"`
+ InstanceAddress basetypes.StringValue `tfsdk:"instance_address"`
+ RouterAddress basetypes.StringValue `tfsdk:"router_address"`
+ state attr.ValueState
+}
+
+func (v NetworkValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
+ attrTypes := make(map[string]tftypes.Type, 4)
+
+ var val tftypes.Value
+ var err error
+
+ attrTypes["access_scope"] = basetypes.StringType{}.TerraformType(ctx)
+ attrTypes["acl"] = basetypes.ListType{
+ ElemType: types.StringType,
+ }.TerraformType(ctx)
+ attrTypes["instance_address"] = basetypes.StringType{}.TerraformType(ctx)
+ attrTypes["router_address"] = basetypes.StringType{}.TerraformType(ctx)
+
+ objectType := tftypes.Object{AttributeTypes: attrTypes}
+
+ switch v.state {
+ case attr.ValueStateKnown:
+ vals := make(map[string]tftypes.Value, 4)
+
+ val, err = v.AccessScope.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["access_scope"] = val
+
+ val, err = v.Acl.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["acl"] = val
+
+ val, err = v.InstanceAddress.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["instance_address"] = val
+
+ val, err = v.RouterAddress.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["router_address"] = val
+
+ if err := tftypes.ValidateValue(objectType, vals); err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ return tftypes.NewValue(objectType, vals), nil
+ case attr.ValueStateNull:
+ return tftypes.NewValue(objectType, nil), nil
+ case attr.ValueStateUnknown:
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), nil
+ default:
+ panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state))
+ }
+}
+
+func (v NetworkValue) IsNull() bool {
+ return v.state == attr.ValueStateNull
+}
+
+func (v NetworkValue) IsUnknown() bool {
+ return v.state == attr.ValueStateUnknown
+}
+
+func (v NetworkValue) String() string {
+ return "NetworkValue"
+}
+
+func (v NetworkValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ var aclVal basetypes.ListValue
+ switch {
+ case v.Acl.IsUnknown():
+ aclVal = types.ListUnknown(types.StringType)
+ case v.Acl.IsNull():
+ aclVal = types.ListNull(types.StringType)
+ default:
+ var d diag.Diagnostics
+ aclVal, d = types.ListValue(types.StringType, v.Acl.Elements())
+ diags.Append(d...)
+ }
+
+ if diags.HasError() {
+ return types.ObjectUnknown(map[string]attr.Type{
+ "access_scope": basetypes.StringType{},
+ "acl": basetypes.ListType{
+ ElemType: types.StringType,
+ },
+ "instance_address": basetypes.StringType{},
+ "router_address": basetypes.StringType{},
+ }), diags
+ }
+
+ attributeTypes := map[string]attr.Type{
+ "access_scope": basetypes.StringType{},
+ "acl": basetypes.ListType{
+ ElemType: types.StringType,
+ },
+ "instance_address": basetypes.StringType{},
+ "router_address": basetypes.StringType{},
+ }
+
+ if v.IsNull() {
+ return types.ObjectNull(attributeTypes), diags
+ }
+
+ if v.IsUnknown() {
+ return types.ObjectUnknown(attributeTypes), diags
+ }
+
+ objVal, diags := types.ObjectValue(
+ attributeTypes,
+ map[string]attr.Value{
+ "access_scope": v.AccessScope,
+ "acl": aclVal,
+ "instance_address": v.InstanceAddress,
+ "router_address": v.RouterAddress,
+ })
+
+ return objVal, diags
+}
+
+func (v NetworkValue) Equal(o attr.Value) bool {
+ other, ok := o.(NetworkValue)
+
+ if !ok {
+ return false
+ }
+
+ if v.state != other.state {
+ return false
+ }
+
+ if v.state != attr.ValueStateKnown {
+ return true
+ }
+
+ if !v.AccessScope.Equal(other.AccessScope) {
+ return false
+ }
+
+ if !v.Acl.Equal(other.Acl) {
+ return false
+ }
+
+ if !v.InstanceAddress.Equal(other.InstanceAddress) {
+ return false
+ }
+
+ if !v.RouterAddress.Equal(other.RouterAddress) {
+ return false
+ }
+
+ return true
+}
+
+func (v NetworkValue) Type(ctx context.Context) attr.Type {
+ return NetworkType{
+ basetypes.ObjectType{
+ AttrTypes: v.AttributeTypes(ctx),
+ },
+ }
+}
+
+func (v NetworkValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
+ return map[string]attr.Type{
+ "access_scope": basetypes.StringType{},
+ "acl": basetypes.ListType{
+ ElemType: types.StringType,
+ },
+ "instance_address": basetypes.StringType{},
+ "router_address": basetypes.StringType{},
+ }
+}
+
+var _ basetypes.ObjectTypable = StorageType{}
+
+type StorageType struct {
+ basetypes.ObjectType
+}
+
+func (t StorageType) Equal(o attr.Type) bool {
+ other, ok := o.(StorageType)
+
+ if !ok {
+ return false
+ }
+
+ return t.ObjectType.Equal(other.ObjectType)
+}
+
+func (t StorageType) String() string {
+ return "StorageType"
+}
+
+func (t StorageType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ attributes := in.Attributes()
+
+ classAttribute, ok := attributes["class"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `class is missing from object`)
+
+ return nil, diags
+ }
+
+ classVal, ok := classAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`class expected to be basetypes.StringValue, was: %T`, classAttribute))
+ }
+
+ sizeAttribute, ok := attributes["size"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `size is missing from object`)
+
+ return nil, diags
+ }
+
+ sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
+ }
+
+ if diags.HasError() {
+ return nil, diags
+ }
+
+ return StorageValue{
+ Class: classVal,
+ Size: sizeVal,
+ state: attr.ValueStateKnown,
+ }, diags
+}
+
+func NewStorageValueNull() StorageValue {
+ return StorageValue{
+ state: attr.ValueStateNull,
+ }
+}
+
+func NewStorageValueUnknown() StorageValue {
+ return StorageValue{
+ state: attr.ValueStateUnknown,
+ }
+}
+
+func NewStorageValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (StorageValue, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521
+ ctx := context.Background()
+
+ for name, attributeType := range attributeTypes {
+ attribute, ok := attributes[name]
+
+ if !ok {
+ diags.AddError(
+ "Missing StorageValue Attribute Value",
+ "While creating a StorageValue value, a missing attribute value was detected. "+
+ "A StorageValue must contain values for all attributes, even if null or unknown. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("StorageValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()),
+ )
+
+ continue
+ }
+
+ if !attributeType.Equal(attribute.Type(ctx)) {
+ diags.AddError(
+ "Invalid StorageValue Attribute Type",
+ "While creating a StorageValue value, an invalid attribute value was detected. "+
+ "A StorageValue must use a matching attribute type for the value. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("StorageValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+
+ fmt.Sprintf("StorageValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)),
+ )
+ }
+ }
+
+ for name := range attributes {
+ _, ok := attributeTypes[name]
+
+ if !ok {
+ diags.AddError(
+ "Extra StorageValue Attribute Value",
+ "While creating a StorageValue value, an extra attribute value was detected. "+
+ "A StorageValue must not contain values beyond the expected attribute types. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("Extra StorageValue Attribute Name: %s", name),
+ )
+ }
+ }
+
+ if diags.HasError() {
+ return NewStorageValueUnknown(), diags
+ }
+
+ classAttribute, ok := attributes["class"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `class is missing from object`)
+
+ return NewStorageValueUnknown(), diags
+ }
+
+ classVal, ok := classAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`class expected to be basetypes.StringValue, was: %T`, classAttribute))
+ }
+
+ sizeAttribute, ok := attributes["size"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `size is missing from object`)
+
+ return NewStorageValueUnknown(), diags
+ }
+
+ sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
+ }
+
+ if diags.HasError() {
+ return NewStorageValueUnknown(), diags
+ }
+
+ return StorageValue{
+ Class: classVal,
+ Size: sizeVal,
+ state: attr.ValueStateKnown,
+ }, diags
+}
+
+func NewStorageValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) StorageValue {
+ object, diags := NewStorageValue(attributeTypes, attributes)
+
+ if diags.HasError() {
+ // This could potentially be added to the diag package.
+ diagsStrings := make([]string, 0, len(diags))
+
+ for _, diagnostic := range diags {
+ diagsStrings = append(diagsStrings, fmt.Sprintf(
+ "%s | %s | %s",
+ diagnostic.Severity(),
+ diagnostic.Summary(),
+ diagnostic.Detail()))
+ }
+
+ panic("NewStorageValueMust received error(s): " + strings.Join(diagsStrings, "\n"))
+ }
+
+ return object
+}
+
+func (t StorageType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) {
+ if in.Type() == nil {
+ return NewStorageValueNull(), nil
+ }
+
+ if !in.Type().Equal(t.TerraformType(ctx)) {
+ return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type())
+ }
+
+ if !in.IsKnown() {
+ return NewStorageValueUnknown(), nil
+ }
+
+ if in.IsNull() {
+ return NewStorageValueNull(), nil
+ }
+
+ attributes := map[string]attr.Value{}
+
+ val := map[string]tftypes.Value{}
+
+ err := in.As(&val)
+
+ if err != nil {
+ return nil, err
+ }
+
+ for k, v := range val {
+ a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v)
+
+ if err != nil {
+ return nil, err
+ }
+
+ attributes[k] = a
+ }
+
+ return NewStorageValueMust(StorageValue{}.AttributeTypes(ctx), attributes), nil
+}
+
+func (t StorageType) ValueType(ctx context.Context) attr.Value {
+ return StorageValue{}
+}
+
+var _ basetypes.ObjectValuable = StorageValue{}
+
+type StorageValue struct {
+ Class basetypes.StringValue `tfsdk:"class"`
+ Size basetypes.Int64Value `tfsdk:"size"`
+ state attr.ValueState
+}
+
+func (v StorageValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
+ attrTypes := make(map[string]tftypes.Type, 2)
+
+ var val tftypes.Value
+ var err error
+
+ attrTypes["class"] = basetypes.StringType{}.TerraformType(ctx)
+ attrTypes["size"] = basetypes.Int64Type{}.TerraformType(ctx)
+
+ objectType := tftypes.Object{AttributeTypes: attrTypes}
+
+ switch v.state {
+ case attr.ValueStateKnown:
+ vals := make(map[string]tftypes.Value, 2)
+
+ val, err = v.Class.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["class"] = val
+
+ val, err = v.Size.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["size"] = val
+
+ if err := tftypes.ValidateValue(objectType, vals); err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ return tftypes.NewValue(objectType, vals), nil
+ case attr.ValueStateNull:
+ return tftypes.NewValue(objectType, nil), nil
+ case attr.ValueStateUnknown:
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), nil
+ default:
+ panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state))
+ }
+}
+
+func (v StorageValue) IsNull() bool {
+ return v.state == attr.ValueStateNull
+}
+
+func (v StorageValue) IsUnknown() bool {
+ return v.state == attr.ValueStateUnknown
+}
+
+func (v StorageValue) String() string {
+ return "StorageValue"
+}
+
+func (v StorageValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ attributeTypes := map[string]attr.Type{
+ "class": basetypes.StringType{},
+ "size": basetypes.Int64Type{},
+ }
+
+ if v.IsNull() {
+ return types.ObjectNull(attributeTypes), diags
+ }
+
+ if v.IsUnknown() {
+ return types.ObjectUnknown(attributeTypes), diags
+ }
+
+ objVal, diags := types.ObjectValue(
+ attributeTypes,
+ map[string]attr.Value{
+ "class": v.Class,
+ "size": v.Size,
+ })
+
+ return objVal, diags
+}
+
+func (v StorageValue) Equal(o attr.Value) bool {
+ other, ok := o.(StorageValue)
+
+ if !ok {
+ return false
+ }
+
+ if v.state != other.state {
+ return false
+ }
+
+ if v.state != attr.ValueStateKnown {
+ return true
+ }
+
+ if !v.Class.Equal(other.Class) {
+ return false
+ }
+
+ if !v.Size.Equal(other.Size) {
+ return false
+ }
+
+ return true
+}
+
+func (v StorageValue) Type(ctx context.Context) attr.Type {
+ return StorageType{
+ basetypes.ObjectType{
+ AttrTypes: v.AttributeTypes(ctx),
+ },
+ }
+}
+
+func (v StorageValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
+ return map[string]attr.Type{
+ "class": basetypes.StringType{},
+ "size": basetypes.Int64Type{},
+ }
+}
diff --git a/stackit/internal/services/sqlserverflexbeta/user/datasource.go b/stackit/internal/services/sqlserverflexbeta/user/datasource.go
new file mode 100644
index 00000000..985692a6
--- /dev/null
+++ b/stackit/internal/services/sqlserverflexbeta/user/datasource.go
@@ -0,0 +1,118 @@
+package sqlserverflexbeta
+
+import (
+ "context"
+ "fmt"
+ "net/http"
+
+ "github.com/hashicorp/terraform-plugin-framework/datasource"
+ "github.com/hashicorp/terraform-plugin-log/tflog"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
+
+ sqlserverflexbetaPkg "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta"
+
+ sqlserverflexbetaUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/utils"
+
+ sqlserverflexbetaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/user/datasources_gen"
+)
+
+var _ datasource.DataSource = (*userDataSource)(nil)
+
+const errorPrefix = "[Sqlserverflexbeta - User]"
+
+func NewUserDataSource() datasource.DataSource {
+ return &userDataSource{}
+}
+
+type userDataSource struct {
+ client *sqlserverflexbetaPkg.APIClient
+ providerData core.ProviderData
+}
+
+func (d *userDataSource) Metadata(_ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
+ resp.TypeName = req.ProviderTypeName + "_sqlserverflexbeta_user"
+}
+
+func (d *userDataSource) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) {
+ resp.Schema = sqlserverflexbetaGen.UserDataSourceSchema(ctx)
+}
+
+// Configure adds the provider configured client to the data source.
+func (d *userDataSource) Configure(
+ ctx context.Context,
+ req datasource.ConfigureRequest,
+ resp *datasource.ConfigureResponse,
+) {
+ var ok bool
+ d.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics)
+ if !ok {
+ return
+ }
+
+ apiClient := sqlserverflexbetaUtils.ConfigureClient(ctx, &d.providerData, &resp.Diagnostics)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+ d.client = apiClient
+ tflog.Info(ctx, fmt.Sprintf("%s client configured", errorPrefix))
+}
+
+func (d *userDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
+ var data sqlserverflexbetaGen.UserModel
+
+ // Read Terraform configuration data into the model
+ resp.Diagnostics.Append(req.Config.Get(ctx, &data)...)
+
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ ctx = core.InitProviderContext(ctx)
+
+ projectId := data.ProjectId.ValueString()
+ region := d.providerData.GetRegionWithOverride(data.Region)
+ userId := data.UserId.ValueString()
+
+ ctx = tflog.SetField(ctx, "project_id", projectId)
+ ctx = tflog.SetField(ctx, "region", region)
+ ctx = tflog.SetField(ctx, "user_id", userId)
+
+ userResp, err := d.client.GetUserRequest(ctx, projectId, region, userId).Execute()
+ if err != nil {
+ utils.LogError(
+ ctx,
+ &resp.Diagnostics,
+ err,
+ "Reading user",
+ fmt.Sprintf("user with ID %q does not exist in project %q.", userId, projectId),
+ map[int]string{
+ http.StatusForbidden: fmt.Sprintf("Project with ID %q not found or forbidden access", projectId),
+ },
+ )
+ resp.State.RemoveResource(ctx)
+ return
+ }
+
+ ctx = core.LogResponse(ctx)
+
+ // Todo: Read API call logic
+
+ // Example data value setting
+ // data.Id = types.StringValue("example-id")
+
+ err = mapResponseToModel(ctx, userResp, &data, resp.Diagnostics)
+ if err != nil {
+ core.LogAndAddError(
+ ctx,
+ &resp.Diagnostics,
+ fmt.Sprintf("%s Read", errorPrefix),
+ fmt.Sprintf("Processing API payload: %v", err),
+ )
+ return
+ }
+
+ // Save data into Terraform state
+ resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
+}
diff --git a/stackit/internal/services/sqlserverflexbeta/user/datasources_gen/user_data_source_gen.go b/stackit/internal/services/sqlserverflexbeta/user/datasources_gen/user_data_source_gen.go
new file mode 100644
index 00000000..1950c24e
--- /dev/null
+++ b/stackit/internal/services/sqlserverflexbeta/user/datasources_gen/user_data_source_gen.go
@@ -0,0 +1,1118 @@
+// Code generated by terraform-plugin-framework-generator DO NOT EDIT.
+
+package sqlserverflexbeta
+
+import (
+ "context"
+ "fmt"
+ "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ "github.com/hashicorp/terraform-plugin-framework/attr"
+ "github.com/hashicorp/terraform-plugin-framework/diag"
+ "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+ "github.com/hashicorp/terraform-plugin-framework/types/basetypes"
+ "github.com/hashicorp/terraform-plugin-go/tftypes"
+ "strings"
+
+ "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
+)
+
+func UserDataSourceSchema(ctx context.Context) schema.Schema {
+ return schema.Schema{
+ Attributes: map[string]schema.Attribute{
+ "instance_id": schema.StringAttribute{
+ Required: true,
+ Description: "The ID of the instance.",
+ MarkdownDescription: "The ID of the instance.",
+ },
+ "page": schema.Int64Attribute{
+ Optional: true,
+ Computed: true,
+ Description: "Number of the page of items list to be returned.",
+ MarkdownDescription: "Number of the page of items list to be returned.",
+ },
+ "pagination": schema.SingleNestedAttribute{
+ Attributes: map[string]schema.Attribute{
+ "page": schema.Int64Attribute{
+ Computed: true,
+ },
+ "size": schema.Int64Attribute{
+ Computed: true,
+ },
+ "sort": schema.StringAttribute{
+ Computed: true,
+ },
+ "total_pages": schema.Int64Attribute{
+ Computed: true,
+ },
+ "total_rows": schema.Int64Attribute{
+ Computed: true,
+ },
+ },
+ CustomType: PaginationType{
+ ObjectType: types.ObjectType{
+ AttrTypes: PaginationValue{}.AttributeTypes(ctx),
+ },
+ },
+ Computed: true,
+ },
+ "project_id": schema.StringAttribute{
+ Required: true,
+ Description: "The STACKIT project ID.",
+ MarkdownDescription: "The STACKIT project ID.",
+ },
+ "region": schema.StringAttribute{
+ Required: true,
+ Description: "The region which should be addressed",
+ MarkdownDescription: "The region which should be addressed",
+ Validators: []validator.String{
+ stringvalidator.OneOf(
+ "eu01",
+ ),
+ },
+ },
+ "size": schema.Int64Attribute{
+ Optional: true,
+ Computed: true,
+ Description: "Number of items to be returned on each page.",
+ MarkdownDescription: "Number of items to be returned on each page.",
+ },
+ "sort": schema.StringAttribute{
+ Optional: true,
+ Computed: true,
+ Description: "Sorting of the users to be returned on each page.",
+ MarkdownDescription: "Sorting of the users to be returned on each page.",
+ Validators: []validator.String{
+ stringvalidator.OneOf(
+ "id.asc",
+ "id.desc",
+ "index.desc",
+ "index.asc",
+ "name.desc",
+ "name.asc",
+ "status.desc",
+ "status.asc",
+ ),
+ },
+ },
+ "users": schema.ListNestedAttribute{
+ NestedObject: schema.NestedAttributeObject{
+ Attributes: map[string]schema.Attribute{
+ "id": schema.Int64Attribute{
+ Computed: true,
+ Description: "The ID of the user.",
+ MarkdownDescription: "The ID of the user.",
+ },
+ "status": schema.StringAttribute{
+ Computed: true,
+ Description: "The current status of the user.",
+ MarkdownDescription: "The current status of the user.",
+ },
+ "username": schema.StringAttribute{
+ Computed: true,
+ Description: "The name of the user.",
+ MarkdownDescription: "The name of the user.",
+ },
+ },
+ CustomType: UsersType{
+ ObjectType: types.ObjectType{
+ AttrTypes: UsersValue{}.AttributeTypes(ctx),
+ },
+ },
+ },
+ Computed: true,
+ Description: "List of all users inside an instance",
+ MarkdownDescription: "List of all users inside an instance",
+ },
+ },
+ }
+}
+
+type UserModel struct {
+ InstanceId types.String `tfsdk:"instance_id"`
+ Page types.Int64 `tfsdk:"page"`
+ Pagination PaginationValue `tfsdk:"pagination"`
+ ProjectId types.String `tfsdk:"project_id"`
+ Region types.String `tfsdk:"region"`
+ Size types.Int64 `tfsdk:"size"`
+ Sort types.String `tfsdk:"sort"`
+ Users types.List `tfsdk:"users"`
+}
+
+var _ basetypes.ObjectTypable = PaginationType{}
+
+type PaginationType struct {
+ basetypes.ObjectType
+}
+
+func (t PaginationType) Equal(o attr.Type) bool {
+ other, ok := o.(PaginationType)
+
+ if !ok {
+ return false
+ }
+
+ return t.ObjectType.Equal(other.ObjectType)
+}
+
+func (t PaginationType) String() string {
+ return "PaginationType"
+}
+
+func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ attributes := in.Attributes()
+
+ pageAttribute, ok := attributes["page"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `page is missing from object`)
+
+ return nil, diags
+ }
+
+ pageVal, ok := pageAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute))
+ }
+
+ sizeAttribute, ok := attributes["size"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `size is missing from object`)
+
+ return nil, diags
+ }
+
+ sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
+ }
+
+ sortAttribute, ok := attributes["sort"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `sort is missing from object`)
+
+ return nil, diags
+ }
+
+ sortVal, ok := sortAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`sort expected to be basetypes.StringValue, was: %T`, sortAttribute))
+ }
+
+ totalPagesAttribute, ok := attributes["total_pages"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `total_pages is missing from object`)
+
+ return nil, diags
+ }
+
+ totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute))
+ }
+
+ totalRowsAttribute, ok := attributes["total_rows"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `total_rows is missing from object`)
+
+ return nil, diags
+ }
+
+ totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute))
+ }
+
+ if diags.HasError() {
+ return nil, diags
+ }
+
+ return PaginationValue{
+ Page: pageVal,
+ Size: sizeVal,
+ Sort: sortVal,
+ TotalPages: totalPagesVal,
+ TotalRows: totalRowsVal,
+ state: attr.ValueStateKnown,
+ }, diags
+}
+
+func NewPaginationValueNull() PaginationValue {
+ return PaginationValue{
+ state: attr.ValueStateNull,
+ }
+}
+
+func NewPaginationValueUnknown() PaginationValue {
+ return PaginationValue{
+ state: attr.ValueStateUnknown,
+ }
+}
+
+func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (PaginationValue, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521
+ ctx := context.Background()
+
+ for name, attributeType := range attributeTypes {
+ attribute, ok := attributes[name]
+
+ if !ok {
+ diags.AddError(
+ "Missing PaginationValue Attribute Value",
+ "While creating a PaginationValue value, a missing attribute value was detected. "+
+ "A PaginationValue must contain values for all attributes, even if null or unknown. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("PaginationValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()),
+ )
+
+ continue
+ }
+
+ if !attributeType.Equal(attribute.Type(ctx)) {
+ diags.AddError(
+ "Invalid PaginationValue Attribute Type",
+ "While creating a PaginationValue value, an invalid attribute value was detected. "+
+ "A PaginationValue must use a matching attribute type for the value. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("PaginationValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+
+ fmt.Sprintf("PaginationValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)),
+ )
+ }
+ }
+
+ for name := range attributes {
+ _, ok := attributeTypes[name]
+
+ if !ok {
+ diags.AddError(
+ "Extra PaginationValue Attribute Value",
+ "While creating a PaginationValue value, an extra attribute value was detected. "+
+ "A PaginationValue must not contain values beyond the expected attribute types. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("Extra PaginationValue Attribute Name: %s", name),
+ )
+ }
+ }
+
+ if diags.HasError() {
+ return NewPaginationValueUnknown(), diags
+ }
+
+ pageAttribute, ok := attributes["page"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `page is missing from object`)
+
+ return NewPaginationValueUnknown(), diags
+ }
+
+ pageVal, ok := pageAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute))
+ }
+
+ sizeAttribute, ok := attributes["size"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `size is missing from object`)
+
+ return NewPaginationValueUnknown(), diags
+ }
+
+ sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
+ }
+
+ sortAttribute, ok := attributes["sort"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `sort is missing from object`)
+
+ return NewPaginationValueUnknown(), diags
+ }
+
+ sortVal, ok := sortAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`sort expected to be basetypes.StringValue, was: %T`, sortAttribute))
+ }
+
+ totalPagesAttribute, ok := attributes["total_pages"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `total_pages is missing from object`)
+
+ return NewPaginationValueUnknown(), diags
+ }
+
+ totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute))
+ }
+
+ totalRowsAttribute, ok := attributes["total_rows"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `total_rows is missing from object`)
+
+ return NewPaginationValueUnknown(), diags
+ }
+
+ totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute))
+ }
+
+ if diags.HasError() {
+ return NewPaginationValueUnknown(), diags
+ }
+
+ return PaginationValue{
+ Page: pageVal,
+ Size: sizeVal,
+ Sort: sortVal,
+ TotalPages: totalPagesVal,
+ TotalRows: totalRowsVal,
+ state: attr.ValueStateKnown,
+ }, diags
+}
+
+func NewPaginationValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) PaginationValue {
+ object, diags := NewPaginationValue(attributeTypes, attributes)
+
+ if diags.HasError() {
+ // This could potentially be added to the diag package.
+ diagsStrings := make([]string, 0, len(diags))
+
+ for _, diagnostic := range diags {
+ diagsStrings = append(diagsStrings, fmt.Sprintf(
+ "%s | %s | %s",
+ diagnostic.Severity(),
+ diagnostic.Summary(),
+ diagnostic.Detail()))
+ }
+
+ panic("NewPaginationValueMust received error(s): " + strings.Join(diagsStrings, "\n"))
+ }
+
+ return object
+}
+
+func (t PaginationType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) {
+ if in.Type() == nil {
+ return NewPaginationValueNull(), nil
+ }
+
+ if !in.Type().Equal(t.TerraformType(ctx)) {
+ return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type())
+ }
+
+ if !in.IsKnown() {
+ return NewPaginationValueUnknown(), nil
+ }
+
+ if in.IsNull() {
+ return NewPaginationValueNull(), nil
+ }
+
+ attributes := map[string]attr.Value{}
+
+ val := map[string]tftypes.Value{}
+
+ err := in.As(&val)
+
+ if err != nil {
+ return nil, err
+ }
+
+ for k, v := range val {
+ a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v)
+
+ if err != nil {
+ return nil, err
+ }
+
+ attributes[k] = a
+ }
+
+ return NewPaginationValueMust(PaginationValue{}.AttributeTypes(ctx), attributes), nil
+}
+
+func (t PaginationType) ValueType(ctx context.Context) attr.Value {
+ return PaginationValue{}
+}
+
+var _ basetypes.ObjectValuable = PaginationValue{}
+
+type PaginationValue struct {
+ Page basetypes.Int64Value `tfsdk:"page"`
+ Size basetypes.Int64Value `tfsdk:"size"`
+ Sort basetypes.StringValue `tfsdk:"sort"`
+ TotalPages basetypes.Int64Value `tfsdk:"total_pages"`
+ TotalRows basetypes.Int64Value `tfsdk:"total_rows"`
+ state attr.ValueState
+}
+
+func (v PaginationValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
+ attrTypes := make(map[string]tftypes.Type, 5)
+
+ var val tftypes.Value
+ var err error
+
+ attrTypes["page"] = basetypes.Int64Type{}.TerraformType(ctx)
+ attrTypes["size"] = basetypes.Int64Type{}.TerraformType(ctx)
+ attrTypes["sort"] = basetypes.StringType{}.TerraformType(ctx)
+ attrTypes["total_pages"] = basetypes.Int64Type{}.TerraformType(ctx)
+ attrTypes["total_rows"] = basetypes.Int64Type{}.TerraformType(ctx)
+
+ objectType := tftypes.Object{AttributeTypes: attrTypes}
+
+ switch v.state {
+ case attr.ValueStateKnown:
+ vals := make(map[string]tftypes.Value, 5)
+
+ val, err = v.Page.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["page"] = val
+
+ val, err = v.Size.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["size"] = val
+
+ val, err = v.Sort.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["sort"] = val
+
+ val, err = v.TotalPages.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["total_pages"] = val
+
+ val, err = v.TotalRows.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["total_rows"] = val
+
+ if err := tftypes.ValidateValue(objectType, vals); err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ return tftypes.NewValue(objectType, vals), nil
+ case attr.ValueStateNull:
+ return tftypes.NewValue(objectType, nil), nil
+ case attr.ValueStateUnknown:
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), nil
+ default:
+ panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state))
+ }
+}
+
+func (v PaginationValue) IsNull() bool {
+ return v.state == attr.ValueStateNull
+}
+
+func (v PaginationValue) IsUnknown() bool {
+ return v.state == attr.ValueStateUnknown
+}
+
+func (v PaginationValue) String() string {
+ return "PaginationValue"
+}
+
+func (v PaginationValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ attributeTypes := map[string]attr.Type{
+ "page": basetypes.Int64Type{},
+ "size": basetypes.Int64Type{},
+ "sort": basetypes.StringType{},
+ "total_pages": basetypes.Int64Type{},
+ "total_rows": basetypes.Int64Type{},
+ }
+
+ if v.IsNull() {
+ return types.ObjectNull(attributeTypes), diags
+ }
+
+ if v.IsUnknown() {
+ return types.ObjectUnknown(attributeTypes), diags
+ }
+
+ objVal, diags := types.ObjectValue(
+ attributeTypes,
+ map[string]attr.Value{
+ "page": v.Page,
+ "size": v.Size,
+ "sort": v.Sort,
+ "total_pages": v.TotalPages,
+ "total_rows": v.TotalRows,
+ })
+
+ return objVal, diags
+}
+
+func (v PaginationValue) Equal(o attr.Value) bool {
+ other, ok := o.(PaginationValue)
+
+ if !ok {
+ return false
+ }
+
+ if v.state != other.state {
+ return false
+ }
+
+ if v.state != attr.ValueStateKnown {
+ return true
+ }
+
+ if !v.Page.Equal(other.Page) {
+ return false
+ }
+
+ if !v.Size.Equal(other.Size) {
+ return false
+ }
+
+ if !v.Sort.Equal(other.Sort) {
+ return false
+ }
+
+ if !v.TotalPages.Equal(other.TotalPages) {
+ return false
+ }
+
+ if !v.TotalRows.Equal(other.TotalRows) {
+ return false
+ }
+
+ return true
+}
+
+func (v PaginationValue) Type(ctx context.Context) attr.Type {
+ return PaginationType{
+ basetypes.ObjectType{
+ AttrTypes: v.AttributeTypes(ctx),
+ },
+ }
+}
+
+func (v PaginationValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
+ return map[string]attr.Type{
+ "page": basetypes.Int64Type{},
+ "size": basetypes.Int64Type{},
+ "sort": basetypes.StringType{},
+ "total_pages": basetypes.Int64Type{},
+ "total_rows": basetypes.Int64Type{},
+ }
+}
+
+var _ basetypes.ObjectTypable = UsersType{}
+
+type UsersType struct {
+ basetypes.ObjectType
+}
+
+func (t UsersType) Equal(o attr.Type) bool {
+ other, ok := o.(UsersType)
+
+ if !ok {
+ return false
+ }
+
+ return t.ObjectType.Equal(other.ObjectType)
+}
+
+func (t UsersType) String() string {
+ return "UsersType"
+}
+
+func (t UsersType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ attributes := in.Attributes()
+
+ idAttribute, ok := attributes["id"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `id is missing from object`)
+
+ return nil, diags
+ }
+
+ idVal, ok := idAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`id expected to be basetypes.Int64Value, was: %T`, idAttribute))
+ }
+
+ statusAttribute, ok := attributes["status"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `status is missing from object`)
+
+ return nil, diags
+ }
+
+ statusVal, ok := statusAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`status expected to be basetypes.StringValue, was: %T`, statusAttribute))
+ }
+
+ usernameAttribute, ok := attributes["username"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `username is missing from object`)
+
+ return nil, diags
+ }
+
+ usernameVal, ok := usernameAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`username expected to be basetypes.StringValue, was: %T`, usernameAttribute))
+ }
+
+ if diags.HasError() {
+ return nil, diags
+ }
+
+ return UsersValue{
+ Id: idVal,
+ Status: statusVal,
+ Username: usernameVal,
+ state: attr.ValueStateKnown,
+ }, diags
+}
+
+func NewUsersValueNull() UsersValue {
+ return UsersValue{
+ state: attr.ValueStateNull,
+ }
+}
+
+func NewUsersValueUnknown() UsersValue {
+ return UsersValue{
+ state: attr.ValueStateUnknown,
+ }
+}
+
+func NewUsersValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (UsersValue, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521
+ ctx := context.Background()
+
+ for name, attributeType := range attributeTypes {
+ attribute, ok := attributes[name]
+
+ if !ok {
+ diags.AddError(
+ "Missing UsersValue Attribute Value",
+ "While creating a UsersValue value, a missing attribute value was detected. "+
+ "A UsersValue must contain values for all attributes, even if null or unknown. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("UsersValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()),
+ )
+
+ continue
+ }
+
+ if !attributeType.Equal(attribute.Type(ctx)) {
+ diags.AddError(
+ "Invalid UsersValue Attribute Type",
+ "While creating a UsersValue value, an invalid attribute value was detected. "+
+ "A UsersValue must use a matching attribute type for the value. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("UsersValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+
+ fmt.Sprintf("UsersValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)),
+ )
+ }
+ }
+
+ for name := range attributes {
+ _, ok := attributeTypes[name]
+
+ if !ok {
+ diags.AddError(
+ "Extra UsersValue Attribute Value",
+ "While creating a UsersValue value, an extra attribute value was detected. "+
+ "A UsersValue must not contain values beyond the expected attribute types. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("Extra UsersValue Attribute Name: %s", name),
+ )
+ }
+ }
+
+ if diags.HasError() {
+ return NewUsersValueUnknown(), diags
+ }
+
+ idAttribute, ok := attributes["id"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `id is missing from object`)
+
+ return NewUsersValueUnknown(), diags
+ }
+
+ idVal, ok := idAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`id expected to be basetypes.Int64Value, was: %T`, idAttribute))
+ }
+
+ statusAttribute, ok := attributes["status"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `status is missing from object`)
+
+ return NewUsersValueUnknown(), diags
+ }
+
+ statusVal, ok := statusAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`status expected to be basetypes.StringValue, was: %T`, statusAttribute))
+ }
+
+ usernameAttribute, ok := attributes["username"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `username is missing from object`)
+
+ return NewUsersValueUnknown(), diags
+ }
+
+ usernameVal, ok := usernameAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`username expected to be basetypes.StringValue, was: %T`, usernameAttribute))
+ }
+
+ if diags.HasError() {
+ return NewUsersValueUnknown(), diags
+ }
+
+ return UsersValue{
+ Id: idVal,
+ Status: statusVal,
+ Username: usernameVal,
+ state: attr.ValueStateKnown,
+ }, diags
+}
+
+func NewUsersValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) UsersValue {
+ object, diags := NewUsersValue(attributeTypes, attributes)
+
+ if diags.HasError() {
+ // This could potentially be added to the diag package.
+ diagsStrings := make([]string, 0, len(diags))
+
+ for _, diagnostic := range diags {
+ diagsStrings = append(diagsStrings, fmt.Sprintf(
+ "%s | %s | %s",
+ diagnostic.Severity(),
+ diagnostic.Summary(),
+ diagnostic.Detail()))
+ }
+
+ panic("NewUsersValueMust received error(s): " + strings.Join(diagsStrings, "\n"))
+ }
+
+ return object
+}
+
+func (t UsersType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) {
+ if in.Type() == nil {
+ return NewUsersValueNull(), nil
+ }
+
+ if !in.Type().Equal(t.TerraformType(ctx)) {
+ return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type())
+ }
+
+ if !in.IsKnown() {
+ return NewUsersValueUnknown(), nil
+ }
+
+ if in.IsNull() {
+ return NewUsersValueNull(), nil
+ }
+
+ attributes := map[string]attr.Value{}
+
+ val := map[string]tftypes.Value{}
+
+ err := in.As(&val)
+
+ if err != nil {
+ return nil, err
+ }
+
+ for k, v := range val {
+ a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v)
+
+ if err != nil {
+ return nil, err
+ }
+
+ attributes[k] = a
+ }
+
+ return NewUsersValueMust(UsersValue{}.AttributeTypes(ctx), attributes), nil
+}
+
+func (t UsersType) ValueType(ctx context.Context) attr.Value {
+ return UsersValue{}
+}
+
+var _ basetypes.ObjectValuable = UsersValue{}
+
+type UsersValue struct {
+ Id basetypes.Int64Value `tfsdk:"id"`
+ Status basetypes.StringValue `tfsdk:"status"`
+ Username basetypes.StringValue `tfsdk:"username"`
+ state attr.ValueState
+}
+
+func (v UsersValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
+ attrTypes := make(map[string]tftypes.Type, 3)
+
+ var val tftypes.Value
+ var err error
+
+ attrTypes["id"] = basetypes.Int64Type{}.TerraformType(ctx)
+ attrTypes["status"] = basetypes.StringType{}.TerraformType(ctx)
+ attrTypes["username"] = basetypes.StringType{}.TerraformType(ctx)
+
+ objectType := tftypes.Object{AttributeTypes: attrTypes}
+
+ switch v.state {
+ case attr.ValueStateKnown:
+ vals := make(map[string]tftypes.Value, 3)
+
+ val, err = v.Id.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["id"] = val
+
+ val, err = v.Status.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["status"] = val
+
+ val, err = v.Username.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["username"] = val
+
+ if err := tftypes.ValidateValue(objectType, vals); err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ return tftypes.NewValue(objectType, vals), nil
+ case attr.ValueStateNull:
+ return tftypes.NewValue(objectType, nil), nil
+ case attr.ValueStateUnknown:
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), nil
+ default:
+ panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state))
+ }
+}
+
+func (v UsersValue) IsNull() bool {
+ return v.state == attr.ValueStateNull
+}
+
+func (v UsersValue) IsUnknown() bool {
+ return v.state == attr.ValueStateUnknown
+}
+
+func (v UsersValue) String() string {
+ return "UsersValue"
+}
+
+func (v UsersValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ attributeTypes := map[string]attr.Type{
+ "id": basetypes.Int64Type{},
+ "status": basetypes.StringType{},
+ "username": basetypes.StringType{},
+ }
+
+ if v.IsNull() {
+ return types.ObjectNull(attributeTypes), diags
+ }
+
+ if v.IsUnknown() {
+ return types.ObjectUnknown(attributeTypes), diags
+ }
+
+ objVal, diags := types.ObjectValue(
+ attributeTypes,
+ map[string]attr.Value{
+ "id": v.Id,
+ "status": v.Status,
+ "username": v.Username,
+ })
+
+ return objVal, diags
+}
+
+func (v UsersValue) Equal(o attr.Value) bool {
+ other, ok := o.(UsersValue)
+
+ if !ok {
+ return false
+ }
+
+ if v.state != other.state {
+ return false
+ }
+
+ if v.state != attr.ValueStateKnown {
+ return true
+ }
+
+ if !v.Id.Equal(other.Id) {
+ return false
+ }
+
+ if !v.Status.Equal(other.Status) {
+ return false
+ }
+
+ if !v.Username.Equal(other.Username) {
+ return false
+ }
+
+ return true
+}
+
+func (v UsersValue) Type(ctx context.Context) attr.Type {
+ return UsersType{
+ basetypes.ObjectType{
+ AttrTypes: v.AttributeTypes(ctx),
+ },
+ }
+}
+
+func (v UsersValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
+ return map[string]attr.Type{
+ "id": basetypes.Int64Type{},
+ "status": basetypes.StringType{},
+ "username": basetypes.StringType{},
+ }
+}
diff --git a/stackit/internal/services/sqlserverflexbeta/user/functions.go b/stackit/internal/services/sqlserverflexbeta/user/functions.go
new file mode 100644
index 00000000..b565f761
--- /dev/null
+++ b/stackit/internal/services/sqlserverflexbeta/user/functions.go
@@ -0,0 +1,98 @@
+package sqlserverflexbeta
+
+import (
+ "context"
+ "fmt"
+ "math"
+
+ "github.com/hashicorp/terraform-plugin-framework/attr"
+ "github.com/hashicorp/terraform-plugin-framework/diag"
+ "github.com/hashicorp/terraform-plugin-framework/resource"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
+
+ sqlserverflexbeta "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta"
+ sqlserverflexbetaResGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/instance/resources_gen"
+)
+
+func mapResponseToModel(
+ ctx context.Context,
+ resp *sqlserverflexbeta.GetUserResponse,
+ m *sqlserverflexbetaResGen.UserModel,
+ tfDiags diag.Diagnostics,
+) error {
+ // TODO: complete and refactor
+ m.Id = types.StringValue(resp.GetId())
+
+ /*
+ sampleList, diags := types.ListValueFrom(ctx, types.StringType, resp.GetList())
+ tfDiags.Append(diags...)
+ if diags.HasError() {
+ return fmt.Errorf(
+ "error converting list response value",
+ )
+ }
+ sample, diags := sqlserverflexbetaResGen.NewSampleValue(
+ sqlserverflexbetaResGen.SampleValue{}.AttributeTypes(ctx),
+ map[string]attr.Value{
+ "field": types.StringValue(string(resp.GetField())),
+ },
+ )
+ tfDiags.Append(diags...)
+ if diags.HasError() {
+ return fmt.Errorf(
+ "error converting sample response value",
+ "sample",
+ types.StringValue(string(resp.GetField())),
+ )
+ }
+ m.Sample = sample
+ */
+ return nil
+}
+
+func handleEncryption(
+ m *sqlserverflexbetaResGen.UserModel,
+ resp *sqlserverflexbeta.GetUserResponse,
+) sqlserverflexbetaResGen.EncryptionValue {
+ if !resp.HasEncryption() ||
+ resp.Encryption == nil ||
+ resp.Encryption.KekKeyId == nil ||
+ resp.Encryption.KekKeyRingId == nil ||
+ resp.Encryption.KekKeyVersion == nil ||
+ resp.Encryption.ServiceAccount == nil {
+
+ if m.Encryption.IsNull() || m.Encryption.IsUnknown() {
+ return sqlserverflexbetaResGen.NewEncryptionValueNull()
+ }
+ return m.Encryption
+ }
+
+ enc := sqlserverflexbetaResGen.NewEncryptionValueNull()
+ if kVal, ok := resp.Encryption.GetKekKeyIdOk(); ok {
+ enc.KekKeyId = types.StringValue(kVal)
+ }
+ if kkVal, ok := resp.Encryption.GetKekKeyRingIdOk(); ok {
+ enc.KekKeyRingId = types.StringValue(kkVal)
+ }
+ if kkvVal, ok := resp.Encryption.GetKekKeyVersionOk(); ok {
+ enc.KekKeyVersion = types.StringValue(kkvVal)
+ }
+ if sa, ok := resp.Encryption.GetServiceAccountOk(); ok {
+ enc.ServiceAccount = types.StringValue(sa)
+ }
+ return enc
+}
+
+func toCreatePayload(
+ ctx context.Context,
+ model *sqlserverflexbetaResGen.UserModel,
+) (*sqlserverflexbeta.CreateUserRequestPayload, error) {
+ if model == nil {
+ return nil, fmt.Errorf("nil model")
+ }
+
+ return &sqlserverflexbeta.CreateUserRequestPayload{
+ // TODO: fill fields
+ }, nil
+}
diff --git a/stackit/internal/services/sqlserverflexbeta/user/resource.go b/stackit/internal/services/sqlserverflexbeta/user/resource.go
new file mode 100644
index 00000000..e2692e13
--- /dev/null
+++ b/stackit/internal/services/sqlserverflexbeta/user/resource.go
@@ -0,0 +1,411 @@
+package sqlserverflexbeta
+
+import (
+ "context"
+ "fmt"
+ "strings"
+
+ "github.com/hashicorp/terraform-plugin-framework/path"
+ "github.com/hashicorp/terraform-plugin-framework/resource"
+ "github.com/hashicorp/terraform-plugin-framework/resource/identityschema"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+ "github.com/hashicorp/terraform-plugin-log/tflog"
+ "github.com/stackitcloud/stackit-sdk-go/core/config"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
+
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta"
+
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
+
+ sqlserverflexbetaResGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/user/resources_gen"
+)
+
+var (
+ _ resource.Resource = &userResource{}
+ _ resource.ResourceWithConfigure = &userResource{}
+ _ resource.ResourceWithImportState = &userResource{}
+ _ resource.ResourceWithModifyPlan = &userResource{}
+ _ resource.ResourceWithIdentity = &userResource{}
+)
+
+func NewUserResource() resource.Resource {
+ return &userResource{}
+}
+
+type userResource struct {
+ client *sqlserverflexbeta.APIClient
+ providerData core.ProviderData
+}
+
+type UserResourceIdentityModel struct {
+ ProjectID types.String `tfsdk:"project_id"`
+ Region types.String `tfsdk:"region"`
+ UserID types.String `tfsdk:"instance_id"`
+ // TODO: implement further needed parts
+}
+
+func (r *userResource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
+ resp.TypeName = req.ProviderTypeName + "_sqlserverflexbeta_user"
+}
+
+func (r *userResource) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) {
+ resp.Schema = sqlserverflexbetaResGen.UserResourceSchema(ctx)
+}
+
+func (r *instanceResource) IdentitySchema(_ context.Context, _ resource.IdentitySchemaRequest, resp *resource.IdentitySchemaResponse) {
+ resp.IdentitySchema = identityschema.Schema{
+ Attributes: map[string]identityschema.Attribute{
+ "project_id": identityschema.StringAttribute{
+ RequiredForImport: true, // must be set during import by the practitioner
+ },
+ "region": identityschema.StringAttribute{
+ RequiredForImport: true, // can be defaulted by the provider configuration
+ },
+ "instance_id": identityschema.StringAttribute{
+ RequiredForImport: true, // can be defaulted by the provider configuration
+ },
+ },
+ }
+}
+
+// Configure adds the provider configured client to the resource.
+func (r *userResource) Configure(
+ ctx context.Context,
+ req resource.ConfigureRequest,
+ resp *resource.ConfigureResponse,
+) {
+ var ok bool
+ r.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics)
+ if !ok {
+ return
+ }
+
+ apiClientConfigOptions := []config.ConfigurationOption{
+ config.WithCustomAuth(r.providerData.RoundTripper),
+ utils.UserAgentConfigOption(r.providerData.Version),
+ }
+ if r.providerData.SqlserverflexbetaCustomEndpoint != "" {
+ apiClientConfigOptions = append(apiClientConfigOptions, config.WithEndpoint(r.providerData.sqlserverflexbetaCustomEndpoint))
+ } else {
+ apiClientConfigOptions = append(apiClientConfigOptions, config.WithRegion(r.providerData.GetRegion()))
+ }
+ apiClient, err := sqlserverflexbeta.NewAPIClient(apiClientConfigOptions...)
+ if err != nil {
+ resp.Diagnostics.AddError(
+ "Error configuring API client",
+ fmt.Sprintf(
+ "Configuring client: %v. This is an error related to the provider configuration, not to the resource configuration",
+ err,
+ ),
+ )
+ return
+ }
+ r.client = apiClient
+ tflog.Info(ctx, "sqlserverflexbeta.User client configured")
+}
+
+func (r *userResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) {
+ var data sqlserverflexbetaResGen.UserModel
+
+ // Read Terraform plan data into the model
+ resp.Diagnostics.Append(req.Plan.Get(ctx, &data)...)
+
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ // Read identity data
+ var identityData UserResourceIdentityModel
+ resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ ctx = core.InitProviderContext(ctx)
+
+ projectId := identityData.ProjectID.ValueString()
+ region := identityData.Region.ValueString()
+ ctx = tflog.SetField(ctx, "project_id", projectId)
+ ctx = tflog.SetField(ctx, "region", region)
+
+ // TODO: Create API call logic
+ /*
+ // Generate API request body from model
+ payload, err := toCreatePayload(ctx, &model)
+ if err != nil {
+ core.LogAndAddError(
+ ctx,
+ &resp.Diagnostics,
+ "Error creating User",
+ fmt.Sprintf("Creating API payload: %v", err),
+ )
+ return
+ }
+ // Create new User
+ createResp, err := r.client.CreateUserRequest(
+ ctx,
+ projectId,
+ region,
+ ).CreateUserRequestPayload(*payload).Execute()
+ if err != nil {
+ core.LogAndAddError(ctx, &resp.Diagnostics, "Error creating User", fmt.Sprintf("Calling API: %v", err))
+ return
+ }
+
+ ctx = core.LogResponse(ctx)
+
+ UserId := *createResp.Id
+ */
+
+ // Example data value setting
+ data.UserId = types.StringValue("id-from-response")
+
+ // TODO: Set data returned by API in identity
+ identity := UserResourceIdentityModel{
+ ProjectID: types.StringValue(projectId),
+ Region: types.StringValue(region),
+ // TODO: add missing values
+ UserID: types.StringValue(UserId),
+ }
+ resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ // TODO: implement wait handler if needed
+ /*
+
+ waitResp, err := wait.CreateUserWaitHandler(
+ ctx,
+ r.client,
+ projectId,
+ UserId,
+ region,
+ ).SetSleepBeforeWait(
+ 30 * time.Second,
+ ).SetTimeout(
+ 90 * time.Minute,
+ ).WaitWithContext(ctx)
+ if err != nil {
+ core.LogAndAddError(
+ ctx,
+ &resp.Diagnostics,
+ "Error creating User",
+ fmt.Sprintf("User creation waiting: %v", err),
+ )
+ return
+ }
+
+ if waitResp.Id == nil {
+ core.LogAndAddError(
+ ctx,
+ &resp.Diagnostics,
+ "Error creating User",
+ "User creation waiting: returned id is nil",
+ )
+ return
+ }
+
+ // Map response body to schema
+ err = mapResponseToModel(ctx, waitResp, &model, resp.Diagnostics)
+ if err != nil {
+ core.LogAndAddError(
+ ctx,
+ &resp.Diagnostics,
+ "Error creating User",
+ fmt.Sprintf("Processing API payload: %v", err),
+ )
+ return
+ }
+
+ */
+
+ // Save data into Terraform state
+ resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
+
+ tflog.Info(ctx, "sqlserverflexbeta.User created")
+}
+
+func (r *userResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) {
+ var data sqlserverflexbetaResGen.UserModel
+
+ // Read Terraform prior state data into the model
+ resp.Diagnostics.Append(req.State.Get(ctx, &data)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ // Read identity data
+ var identityData UserResourceIdentityModel
+ resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ ctx = core.InitProviderContext(ctx)
+
+ projectId := identityData.ProjectID.ValueString()
+ region := identityData.Region.ValueString()
+ ctx = tflog.SetField(ctx, "project_id", projectId)
+ ctx = tflog.SetField(ctx, "region", region)
+
+ // Todo: Read API call logic
+
+ // Save updated data into Terraform state
+ resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
+
+ // TODO: Set data returned by API in identity
+ identity := UserResourceIdentityModel{
+ ProjectID: types.StringValue(projectId),
+ Region: types.StringValue(region),
+ // InstanceID: types.StringValue(instanceId),
+ }
+ resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ tflog.Info(ctx, "sqlserverflexbeta.User read")
+}
+
+func (r *userResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) {
+ var data sqlserverflexbetaResGen.UserModel
+
+ // Read Terraform prior state data into the model
+ resp.Diagnostics.Append(req.State.Get(ctx, &data)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ // Read identity data
+ var identityData UserResourceIdentityModel
+ resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ ctx = core.InitProviderContext(ctx)
+
+ projectId := identityData.ProjectID.ValueString()
+ region := identityData.Region.ValueString()
+ ctx = tflog.SetField(ctx, "project_id", projectId)
+ ctx = tflog.SetField(ctx, "region", region)
+
+ // Todo: Update API call logic
+
+ // Save updated data into Terraform state
+ resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
+
+ tflog.Info(ctx, "sqlserverflexbeta.User updated")
+}
+
+func (r *userResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) {
+ var data sqlserverflexbetaResGen.UserModel
+
+ // Read Terraform prior state data into the model
+ resp.Diagnostics.Append(req.State.Get(ctx, &data)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ // Read identity data
+ var identityData UserResourceIdentityModel
+ resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ ctx = core.InitProviderContext(ctx)
+
+ projectId := identityData.ProjectID.ValueString()
+ region := identityData.Region.ValueString()
+ ctx = tflog.SetField(ctx, "project_id", projectId)
+ ctx = tflog.SetField(ctx, "region", region)
+
+ // Todo: Delete API call logic
+
+ tflog.Info(ctx, "sqlserverflexbeta.User deleted")
+}
+
+// ModifyPlan implements resource.ResourceWithModifyPlan.
+// Use the modifier to set the effective region in the current plan.
+func (r *userResource) ModifyPlan(
+ ctx context.Context,
+ req resource.ModifyPlanRequest,
+ resp *resource.ModifyPlanResponse,
+) { // nolint:gocritic // function signature required by Terraform
+ var configModel sqlserverflexbetaResGen.UserModel
+ // skip initial empty configuration to avoid follow-up errors
+ if req.Config.Raw.IsNull() {
+ return
+ }
+ resp.Diagnostics.Append(req.Config.Get(ctx, &configModel)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ var planModel sqlserverflexbetaResGen.UserModel
+ resp.Diagnostics.Append(req.Plan.Get(ctx, &planModel)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ utils.AdaptRegion(ctx, configModel.Region, &planModel.Region, r.providerData.GetRegion(), resp)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ var identityModel UserResourceIdentityModel
+ identityModel.ProjectID = planModel.ProjectId
+ identityModel.Region = planModel.Region
+ // TODO: complete
+ //if !planModel.InstanceId.IsNull() && !planModel.InstanceId.IsUnknown() {
+ // identityModel.InstanceID = planModel.InstanceId
+ //}
+
+ resp.Diagnostics.Append(resp.Identity.Set(ctx, identityModel)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ resp.Diagnostics.Append(resp.Plan.Set(ctx, planModel)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+}
+
+// ImportState imports a resource into the Terraform state on success.
+// The expected format of the resource import identifier is: project_id,zone_id,record_set_id
+func (r *userResource) ImportState(
+ ctx context.Context,
+ req resource.ImportStateRequest,
+ resp *resource.ImportStateResponse,
+) {
+ idParts := strings.Split(req.ID, core.Separator)
+
+ // Todo: Import logic
+ if len(idParts) < 2 || idParts[0] == "" || idParts[1] == "" {
+ core.LogAndAddError(
+ ctx, &resp.Diagnostics,
+ "Error importing database",
+ fmt.Sprintf(
+ "Expected import identifier with format [project_id],[region],..., got %q",
+ req.ID,
+ ),
+ )
+ return
+ }
+
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), idParts[0])...)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), idParts[1])...)
+ // ... more ...
+
+ core.LogAndAddWarning(
+ ctx,
+ &resp.Diagnostics,
+ "Sqlserverflexbeta database imported with empty password",
+ "The database password is not imported as it is only available upon creation of a new database. The password field will be empty.",
+ )
+ tflog.Info(ctx, "Sqlserverflexbeta user state imported")
+}
diff --git a/stackit/internal/services/sqlserverflexbeta/user/resources_gen/user_resource_gen.go b/stackit/internal/services/sqlserverflexbeta/user/resources_gen/user_resource_gen.go
new file mode 100644
index 00000000..3bf7f4fc
--- /dev/null
+++ b/stackit/internal/services/sqlserverflexbeta/user/resources_gen/user_resource_gen.go
@@ -0,0 +1,111 @@
+// Code generated by terraform-plugin-framework-generator DO NOT EDIT.
+
+package sqlserverflexbeta
+
+import (
+ "context"
+ "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema"
+)
+
+func UserResourceSchema(ctx context.Context) schema.Schema {
+ return schema.Schema{
+ Attributes: map[string]schema.Attribute{
+ "default_database": schema.StringAttribute{
+ Optional: true,
+ Computed: true,
+ Description: "The default database for a user of the instance.",
+ MarkdownDescription: "The default database for a user of the instance.",
+ },
+ "host": schema.StringAttribute{
+ Computed: true,
+ Description: "The host of the instance in which the user belongs to.",
+ MarkdownDescription: "The host of the instance in which the user belongs to.",
+ },
+ "id": schema.Int64Attribute{
+ Computed: true,
+ Description: "The ID of the user.",
+ MarkdownDescription: "The ID of the user.",
+ },
+ "instance_id": schema.StringAttribute{
+ Optional: true,
+ Computed: true,
+ Description: "The ID of the instance.",
+ MarkdownDescription: "The ID of the instance.",
+ },
+ "password": schema.StringAttribute{
+ Computed: true,
+ Description: "The password for the user.",
+ MarkdownDescription: "The password for the user.",
+ },
+ "port": schema.Int64Attribute{
+ Computed: true,
+ Description: "The port of the instance in which the user belongs to.",
+ MarkdownDescription: "The port of the instance in which the user belongs to.",
+ },
+ "project_id": schema.StringAttribute{
+ Optional: true,
+ Computed: true,
+ Description: "The STACKIT project ID.",
+ MarkdownDescription: "The STACKIT project ID.",
+ },
+ "region": schema.StringAttribute{
+ Optional: true,
+ Computed: true,
+ Description: "The region which should be addressed",
+ MarkdownDescription: "The region which should be addressed",
+ Validators: []validator.String{
+ stringvalidator.OneOf(
+ "eu01",
+ ),
+ },
+ },
+ "roles": schema.ListAttribute{
+ ElementType: types.StringType,
+ Required: true,
+ Description: "A list containing the user roles for the instance.",
+ MarkdownDescription: "A list containing the user roles for the instance.",
+ },
+ "status": schema.StringAttribute{
+ Computed: true,
+ Description: "The current status of the user.",
+ MarkdownDescription: "The current status of the user.",
+ },
+ "uri": schema.StringAttribute{
+ Computed: true,
+ Description: "The connection string for the user to the instance.",
+ MarkdownDescription: "The connection string for the user to the instance.",
+ },
+ "user_id": schema.Int64Attribute{
+ Optional: true,
+ Computed: true,
+ Description: "The ID of the user.",
+ MarkdownDescription: "The ID of the user.",
+ },
+ "username": schema.StringAttribute{
+ Required: true,
+ Description: "The name of the user.",
+ MarkdownDescription: "The name of the user.",
+ },
+ },
+ }
+}
+
+type UserModel struct {
+ DefaultDatabase types.String `tfsdk:"default_database"`
+ Host types.String `tfsdk:"host"`
+ Id types.Int64 `tfsdk:"id"`
+ InstanceId types.String `tfsdk:"instance_id"`
+ Password types.String `tfsdk:"password"`
+ Port types.Int64 `tfsdk:"port"`
+ ProjectId types.String `tfsdk:"project_id"`
+ Region types.String `tfsdk:"region"`
+ Roles types.List `tfsdk:"roles"`
+ Status types.String `tfsdk:"status"`
+ Uri types.String `tfsdk:"uri"`
+ UserId types.Int64 `tfsdk:"user_id"`
+ Username types.String `tfsdk:"username"`
+}
diff --git a/stackit/internal/wait/sqlserverflexbeta/wait.go b/stackit/internal/wait/sqlserverflexbeta/wait.go
new file mode 100644
index 00000000..bd813d82
--- /dev/null
+++ b/stackit/internal/wait/sqlserverflexbeta/wait.go
@@ -0,0 +1,159 @@
+package sqlserverflexbeta
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "net/http"
+ "strings"
+ "time"
+
+ "github.com/hashicorp/terraform-plugin-log/tflog"
+ "github.com/stackitcloud/stackit-sdk-go/core/oapierror"
+ "github.com/stackitcloud/stackit-sdk-go/core/wait"
+ sqlserverflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta"
+)
+
+// READY, PENDING, PROGRESSING, FAILURE, UNKNOWN,
+const (
+ InstanceStateEmpty = ""
+ InstanceStateSuccess = "READY"
+ InstanceStatePending = "PENDING"
+ InstanceStateProcessing = "PROGRESSING"
+ InstanceStateFailed = "FAILURE"
+ InstanceStateUnknown = "UNKNOWN"
+ InstanceStateTerminating = "TERMINATING"
+)
+
+// APIClientInterface Interface needed for tests
+type APIClientInterface interface {
+ GetInstanceRequestExecute(ctx context.Context, projectId, region, instanceId string) (*sqlserverflex.GetInstanceResponse, error)
+ GetDatabaseRequestExecute(ctx context.Context, projectId string, region string, instanceId string, databaseName string) (*sqlserverflex.GetDatabaseResponse, error)
+ GetUserRequestExecute(ctx context.Context, projectId string, region string, instanceId string, userId int64) (*sqlserverflex.GetUserResponse, error)
+}
+
+// CreateInstanceWaitHandler will wait for instance creation
+func CreateInstanceWaitHandler(
+ ctx context.Context,
+ a APIClientInterface,
+ projectId, instanceId, region string,
+) *wait.AsyncActionHandler[sqlserverflex.GetInstanceResponse] {
+ handler := wait.New(func() (waitFinished bool, response *sqlserverflex.GetInstanceResponse, err error) {
+ s, err := a.GetInstanceRequestExecute(ctx, projectId, region, instanceId)
+ if err != nil {
+ return false, nil, err
+ }
+ if s == nil || s.Id == nil || *s.Id != instanceId || s.Status == nil {
+ return false, nil, nil
+ }
+ switch strings.ToLower(string(*s.Status)) {
+ case strings.ToLower(InstanceStateSuccess):
+ if *s.Network.AccessScope == "SNA" {
+ if s.Network.InstanceAddress == nil {
+ tflog.Info(ctx, "Waiting for instance_address")
+ return false, nil, nil
+ }
+ if s.Network.RouterAddress == nil {
+ tflog.Info(ctx, "Waiting for router_address")
+ return false, nil, nil
+ }
+ }
+ return true, s, nil
+ case strings.ToLower(InstanceStateUnknown), strings.ToLower(InstanceStateFailed):
+ return true, s, fmt.Errorf("create failed for instance with id %s", instanceId)
+ case strings.ToLower(InstanceStatePending), strings.ToLower(InstanceStateProcessing):
+ tflog.Info(ctx, "request is being handled", map[string]interface{}{
+ "status": *s.Status,
+ })
+ return false, nil, nil
+ default:
+ tflog.Info(ctx, "Wait (create) received unknown status", map[string]interface{}{
+ "instanceId": instanceId,
+ "status": s.Status,
+ })
+ return false, s, nil
+ }
+ })
+ return handler
+}
+
+// UpdateInstanceWaitHandler will wait for instance update
+func UpdateInstanceWaitHandler(ctx context.Context, a APIClientInterface, projectId, instanceId, region string) *wait.AsyncActionHandler[sqlserverflex.GetInstanceResponse] {
+ handler := wait.New(func() (waitFinished bool, response *sqlserverflex.GetInstanceResponse, err error) {
+ s, err := a.GetInstanceRequestExecute(ctx, projectId, region, instanceId)
+ if err != nil {
+ return false, nil, err
+ }
+ if s == nil || s.Id == nil || *s.Id != instanceId || s.Status == nil {
+ return false, nil, nil
+ }
+ switch strings.ToLower(string(*s.Status)) {
+ case strings.ToLower(InstanceStateSuccess):
+ return true, s, nil
+ case strings.ToLower(InstanceStateUnknown), strings.ToLower(InstanceStateFailed):
+ return true, s, fmt.Errorf("update failed for instance with id %s", instanceId)
+ case strings.ToLower(InstanceStatePending), strings.ToLower(InstanceStateProcessing):
+ tflog.Info(ctx, "request is being handled", map[string]interface{}{
+ "status": *s.Status,
+ })
+ return false, nil, nil
+ default:
+ tflog.Info(ctx, "Wait (update) received unknown status", map[string]interface{}{
+ "instanceId": instanceId,
+ "status": s.Status,
+ })
+ return false, s, nil
+ }
+ })
+ handler.SetSleepBeforeWait(15 * time.Second)
+ handler.SetTimeout(45 * time.Minute)
+ return handler
+}
+
+// DeleteInstanceWaitHandler will wait for instance deletion
+func DeleteInstanceWaitHandler(ctx context.Context, a APIClientInterface, projectId, instanceId, region string) *wait.AsyncActionHandler[struct{}] {
+ handler := wait.New(func() (waitFinished bool, response *struct{}, err error) {
+ _, err = a.GetInstanceRequestExecute(ctx, projectId, region, instanceId)
+ if err == nil {
+ return false, nil, nil
+ }
+ var oapiErr *oapierror.GenericOpenAPIError
+ ok := errors.As(err, &oapiErr)
+ if !ok {
+ return false, nil, fmt.Errorf("could not convert error to oapierror.GenericOpenAPIError")
+ }
+ if oapiErr.StatusCode != http.StatusNotFound {
+ return false, nil, err
+ }
+ return true, nil, nil
+ })
+ handler.SetTimeout(15 * time.Minute)
+ return handler
+}
+
+// CreateDatabaseWaitHandler will wait for instance creation
+func CreateDatabaseWaitHandler(
+ ctx context.Context,
+ a APIClientInterface,
+ projectId, instanceId, region, databaseName string,
+) *wait.AsyncActionHandler[sqlserverflex.GetDatabaseResponse] {
+ handler := wait.New(func() (waitFinished bool, response *sqlserverflex.GetDatabaseResponse, err error) {
+ s, err := a.GetDatabaseRequestExecute(ctx, projectId, region, instanceId, databaseName)
+ if err != nil {
+ return false, nil, err
+ }
+ if s == nil || s.Name == nil || *s.Name != databaseName {
+ return false, nil, nil
+ }
+ var oapiErr *oapierror.GenericOpenAPIError
+ ok := errors.As(err, &oapiErr)
+ if !ok {
+ return false, nil, fmt.Errorf("could not convert error to oapierror.GenericOpenAPIError")
+ }
+ if oapiErr.StatusCode != http.StatusNotFound {
+ return false, nil, err
+ }
+ return true, nil, nil
+ })
+ return handler
+}
diff --git a/stackit/internal/wait/sqlserverflexbeta/wait_test.go b/stackit/internal/wait/sqlserverflexbeta/wait_test.go
new file mode 100644
index 00000000..e1ccc9c5
--- /dev/null
+++ b/stackit/internal/wait/sqlserverflexbeta/wait_test.go
@@ -0,0 +1,258 @@
+package sqlserverflexbeta
+
+import (
+ "context"
+ "testing"
+ "time"
+
+ "github.com/google/go-cmp/cmp"
+ "github.com/stackitcloud/stackit-sdk-go/core/oapierror"
+ "github.com/stackitcloud/stackit-sdk-go/core/utils"
+ sqlserverflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta"
+)
+
+// Used for testing instance operations
+type apiClientInstanceMocked struct {
+ instanceId string
+ instanceState string
+ instanceNetwork sqlserverflex.InstanceNetwork
+ instanceIsDeleted bool
+ instanceGetFails bool
+}
+
+func (a *apiClientInstanceMocked) GetInstanceRequestExecute(_ context.Context, _, _, _ string) (*sqlserverflex.GetInstanceResponse, error) {
+ if a.instanceGetFails {
+ return nil, &oapierror.GenericOpenAPIError{
+ StatusCode: 500,
+ }
+ }
+
+ if a.instanceIsDeleted {
+ return nil, &oapierror.GenericOpenAPIError{
+ StatusCode: 404,
+ }
+ }
+
+ return &sqlserverflex.GetInstanceResponse{
+ Id: &a.instanceId,
+ Status: sqlserverflex.GetInstanceResponseGetStatusAttributeType(&a.instanceState),
+ Network: &a.instanceNetwork,
+ }, nil
+}
+func TestCreateInstanceWaitHandler(t *testing.T) {
+ t.Skip("skipping - needs refactoring")
+ tests := []struct {
+ desc string
+ instanceGetFails bool
+ instanceState string
+ instanceNetwork sqlserverflex.InstanceNetwork
+ usersGetErrorStatus int
+ wantErr bool
+ wantRes *sqlserverflex.GetInstanceResponse
+ }{
+ {
+ desc: "create_succeeded",
+ instanceGetFails: false,
+ instanceState: InstanceStateSuccess,
+ instanceNetwork: sqlserverflex.InstanceNetwork{
+ AccessScope: nil,
+ Acl: nil,
+ InstanceAddress: utils.Ptr("10.0.0.1"),
+ RouterAddress: utils.Ptr("10.0.0.2"),
+ },
+ wantErr: false,
+ wantRes: &sqlserverflex.GetInstanceResponse{
+ BackupSchedule: nil,
+ Edition: nil,
+ Encryption: nil,
+ FlavorId: nil,
+ Id: nil,
+ IsDeletable: nil,
+ Name: nil,
+ Network: &sqlserverflex.InstanceNetwork{
+ AccessScope: nil,
+ Acl: nil,
+ InstanceAddress: utils.Ptr("10.0.0.1"),
+ RouterAddress: utils.Ptr("10.0.0.2"),
+ },
+ Replicas: nil,
+ RetentionDays: nil,
+ Status: nil,
+ Storage: nil,
+ Version: nil,
+ },
+ },
+ {
+ desc: "create_failed",
+ instanceGetFails: false,
+ instanceState: InstanceStateFailed,
+ wantErr: true,
+ wantRes: nil,
+ },
+ {
+ desc: "create_failed_2",
+ instanceGetFails: false,
+ instanceState: InstanceStateEmpty,
+ wantErr: true,
+ wantRes: nil,
+ },
+ {
+ desc: "instance_get_fails",
+ instanceGetFails: true,
+ wantErr: true,
+ wantRes: nil,
+ },
+ {
+ desc: "timeout",
+ instanceGetFails: false,
+ instanceState: InstanceStateProcessing,
+ wantErr: true,
+ wantRes: nil,
+ },
+ }
+ for _, tt := range tests {
+ t.Run(tt.desc, func(t *testing.T) {
+ instanceId := "foo-bar"
+
+ apiClient := &apiClientInstanceMocked{
+ instanceId: instanceId,
+ instanceState: tt.instanceState,
+ instanceGetFails: tt.instanceGetFails,
+ }
+
+ handler := CreateInstanceWaitHandler(context.Background(), apiClient, "", instanceId, "")
+
+ gotRes, err := handler.SetTimeout(10 * time.Millisecond).SetSleepBeforeWait(1 * time.Millisecond).WaitWithContext(context.Background())
+ if (err != nil) != tt.wantErr {
+ t.Fatalf("handler error = %v, wantErr %v", err, tt.wantErr)
+ }
+
+ if !cmp.Equal(gotRes, tt.wantRes) {
+ t.Fatalf("handler gotRes = %v, want %v", gotRes, tt.wantRes)
+ }
+ })
+ }
+}
+
+func TestUpdateInstanceWaitHandler(t *testing.T) {
+ t.Skip("skipping - needs refactoring")
+ tests := []struct {
+ desc string
+ instanceGetFails bool
+ instanceState string
+ wantErr bool
+ wantResp bool
+ }{
+ {
+ desc: "update_succeeded",
+ instanceGetFails: false,
+ instanceState: InstanceStateSuccess,
+ wantErr: false,
+ wantResp: true,
+ },
+ {
+ desc: "update_failed",
+ instanceGetFails: false,
+ instanceState: InstanceStateFailed,
+ wantErr: true,
+ wantResp: true,
+ },
+ {
+ desc: "update_failed_2",
+ instanceGetFails: false,
+ instanceState: InstanceStateEmpty,
+ wantErr: true,
+ wantResp: true,
+ },
+ {
+ desc: "get_fails",
+ instanceGetFails: true,
+ wantErr: true,
+ wantResp: false,
+ },
+ {
+ desc: "timeout",
+ instanceGetFails: false,
+ instanceState: InstanceStateProcessing,
+ wantErr: true,
+ wantResp: true,
+ },
+ }
+ for _, tt := range tests {
+ t.Run(tt.desc, func(t *testing.T) {
+ instanceId := "foo-bar"
+
+ apiClient := &apiClientInstanceMocked{
+ instanceId: instanceId,
+ instanceState: tt.instanceState,
+ instanceGetFails: tt.instanceGetFails,
+ }
+
+ var wantRes *sqlserverflex.GetInstanceResponse
+ if tt.wantResp {
+ wantRes = &sqlserverflex.GetInstanceResponse{
+ Id: &instanceId,
+ Status: sqlserverflex.GetInstanceResponseGetStatusAttributeType(utils.Ptr(tt.instanceState)),
+ }
+ }
+
+ handler := UpdateInstanceWaitHandler(context.Background(), apiClient, "", instanceId, "")
+
+ gotRes, err := handler.SetTimeout(10 * time.Millisecond).SetSleepBeforeWait(1 * time.Millisecond).WaitWithContext(context.Background())
+
+ if (err != nil) != tt.wantErr {
+ t.Fatalf("handler error = %v, wantErr %v", err, tt.wantErr)
+ }
+ if !cmp.Equal(gotRes, wantRes) {
+ t.Fatalf("handler gotRes = %v, want %v", gotRes, wantRes)
+ }
+ })
+ }
+}
+
+func TestDeleteInstanceWaitHandler(t *testing.T) {
+ tests := []struct {
+ desc string
+ instanceGetFails bool
+ instanceState string
+ wantErr bool
+ }{
+ {
+ desc: "delete_succeeded",
+ instanceGetFails: false,
+ instanceState: InstanceStateSuccess,
+ wantErr: false,
+ },
+ {
+ desc: "delete_failed",
+ instanceGetFails: false,
+ instanceState: InstanceStateFailed,
+ wantErr: true,
+ },
+ {
+ desc: "get_fails",
+ instanceGetFails: true,
+ wantErr: true,
+ },
+ }
+ for _, tt := range tests {
+ t.Run(tt.desc, func(t *testing.T) {
+ instanceId := "foo-bar"
+
+ apiClient := &apiClientInstanceMocked{
+ instanceGetFails: tt.instanceGetFails,
+ instanceIsDeleted: tt.instanceState == InstanceStateSuccess,
+ instanceId: instanceId,
+ instanceState: tt.instanceState,
+ }
+
+ handler := DeleteInstanceWaitHandler(context.Background(), apiClient, "", instanceId, "")
+
+ _, err := handler.SetTimeout(10 * time.Millisecond).WaitWithContext(context.Background())
+
+ if (err != nil) != tt.wantErr {
+ t.Fatalf("handler error = %v, wantErr %v", err, tt.wantErr)
+ }
+ })
+ }
+}
diff --git a/stackit/provider.go b/stackit/provider.go
index 22ade416..f05e204c 100644
--- a/stackit/provider.go
+++ b/stackit/provider.go
@@ -26,11 +26,15 @@ import (
postgresflexalphaFlavors "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/flavors"
postgresFlexAlphaInstance "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/instance"
postgresFlexAlphaUser "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/user"
+ sqlserverFlexBetaFlavor "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/flavor"
+
sqlserverflexalphaDatabase "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/database"
sqlserverFlexAlphaFlavor "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/flavor"
sqlServerFlexAlphaInstance "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/instance"
sqlserverFlexAlphaUser "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/user"
- sqlserverflexalphaVersion "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/version"
+
+ sqlserverflexBetaDatabase "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/database"
+ sqlserverflexBetaInstance "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/instance"
)
// Ensure the implementation satisfies the expected interfaces
@@ -502,11 +506,14 @@ func (p *Provider) DataSources(_ context.Context) []func() datasource.DataSource
postgresFlexAlphaUser.NewUserDataSource,
postgresflexalphaFlavors.NewFlavorsDataSource,
- sqlserverflexalphaVersion.NewVersionDataSource,
sqlserverFlexAlphaFlavor.NewFlavorDataSource,
sqlServerFlexAlphaInstance.NewInstanceDataSource,
sqlserverFlexAlphaUser.NewUserDataSource,
sqlserverflexalphaDatabase.NewDatabaseDataSource,
+
+ sqlserverflexBetaDatabase.NewDatabaseDataSource,
+ sqlserverflexBetaInstance.NewInstanceDataSource,
+ sqlserverFlexBetaFlavor.NewFlavorDataSource,
}
}
@@ -516,9 +523,13 @@ func (p *Provider) Resources(_ context.Context) []func() resource.Resource {
postgresFlexAlphaDatabase.NewDatabaseResource,
postgresFlexAlphaInstance.NewInstanceResource,
postgresFlexAlphaUser.NewUserResource,
+
sqlServerFlexAlphaInstance.NewInstanceResource,
sqlserverFlexAlphaUser.NewUserResource,
sqlserverflexalphaDatabase.NewDatabaseResource,
+
+ sqlserverflexBetaInstance.NewInstanceResource,
+ sqlserverflexBetaDatabase.NewDatabaseResource,
}
return resources
}
diff --git a/tools/tools.go b/tools/tools.go
index 7023ef96..075c26d5 100644
--- a/tools/tools.go
+++ b/tools/tools.go
@@ -1,9 +1,5 @@
package tools
-// Generate copyright headers
-// nolint:misspell // copywrite is correct here
-//go:generate go run github.com/hashicorp/copywrite headers -d .. --config ../.copywrite.hcl
-
// Format Terraform code for use in documentation.
// If you do not have Terraform installed, you can remove the formatting command, but it is suggested
// to ensure the documentation is formatted properly.