chore: work save
Some checks failed
CI Workflow / Check GoReleaser config (pull_request) Successful in 4s
CI Workflow / Test readiness for publishing provider (pull_request) Failing after 3m57s
CI Workflow / CI run tests (pull_request) Failing after 5m5s
CI Workflow / CI run build and linting (pull_request) Failing after 4m50s
CI Workflow / Code coverage report (pull_request) Has been skipped

This commit is contained in:
Marcel_Henselin 2026-03-05 15:11:15 +01:00
parent 411e99739a
commit d6d3a795bb
118 changed files with 3101 additions and 18065 deletions

View file

@ -10,14 +10,13 @@ import (
"github.com/hashicorp/terraform-plugin-framework/diag"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-log/tflog"
"github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
postgresflexalpha2 "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/database/datasources_gen"
postgresflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/utils"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
)
// Ensure the implementation satisfies the expected interfaces.
@ -38,7 +37,7 @@ type dataSourceModel struct {
// databaseDataSource is the data source implementation.
type databaseDataSource struct {
client *postgresflexalpha.APIClient
client *v3alpha1api.APIClient
providerData core.ProviderData
}
@ -144,7 +143,7 @@ func (r *databaseDataSource) getDatabaseByNameOrID(
model *dataSourceModel,
projectId, region, instanceId string,
diags *diag.Diagnostics,
) (*postgresflexalpha.ListDatabase, error) {
) (*v3alpha1api.ListDatabase, error) {
isIdSet := !model.DatabaseId.IsNull() && !model.DatabaseId.IsUnknown()
isNameSet := !model.Name.IsNull() && !model.Name.IsUnknown()
@ -157,14 +156,14 @@ func (r *databaseDataSource) getDatabaseByNameOrID(
}
if isIdSet {
databaseId := model.DatabaseId.ValueInt64()
databaseId := model.DatabaseId.ValueInt32()
ctx = tflog.SetField(ctx, "database_id", databaseId)
return getDatabaseById(ctx, r.client, projectId, region, instanceId, databaseId)
return getDatabaseById(ctx, r.client.DefaultAPI, projectId, region, instanceId, databaseId)
}
databaseName := model.Name.ValueString()
ctx = tflog.SetField(ctx, "name", databaseName)
return getDatabaseByName(ctx, r.client, projectId, region, instanceId, databaseName)
return getDatabaseByName(ctx, r.client.DefaultAPI, projectId, region, instanceId, databaseName)
}
// handleReadError centralizes API error handling for the Read operation.

View file

@ -14,12 +14,12 @@ import (
func DatabaseDataSourceSchema(ctx context.Context) schema.Schema {
return schema.Schema{
Attributes: map[string]schema.Attribute{
"database_id": schema.Int64Attribute{
"database_id": schema.Int32Attribute{
Required: true,
Description: "The ID of the database.",
MarkdownDescription: "The ID of the database.",
},
"tf_original_api_id": schema.Int64Attribute{
"tf_original_api_id": schema.Int32Attribute{
Computed: true,
Description: "The id of the database.",
MarkdownDescription: "The id of the database.",
@ -59,8 +59,8 @@ func DatabaseDataSourceSchema(ctx context.Context) schema.Schema {
}
type DatabaseModel struct {
DatabaseId types.Int64 `tfsdk:"database_id"`
Id types.Int64 `tfsdk:"tf_original_api_id"`
DatabaseId types.Int32 `tfsdk:"database_id"`
Id types.Int32 `tfsdk:"tf_original_api_id"`
InstanceId types.String `tfsdk:"instance_id"`
Name types.String `tfsdk:"name"`
Owner types.String `tfsdk:"owner"`

View file

@ -23,7 +23,7 @@ func DatabasesDataSourceSchema(ctx context.Context) schema.Schema {
"databases": schema.ListNestedAttribute{
NestedObject: schema.NestedAttributeObject{
Attributes: map[string]schema.Attribute{
"id": schema.Int64Attribute{
"id": schema.Int32Attribute{
Computed: true,
Description: "The id of the database.",
MarkdownDescription: "The id of the database.",
@ -54,7 +54,7 @@ func DatabasesDataSourceSchema(ctx context.Context) schema.Schema {
Description: "The ID of the instance.",
MarkdownDescription: "The ID of the instance.",
},
"page": schema.Int64Attribute{
"page": schema.Int32Attribute{
Optional: true,
Computed: true,
Description: "Number of the page of items list to be returned.",
@ -62,19 +62,19 @@ func DatabasesDataSourceSchema(ctx context.Context) schema.Schema {
},
"pagination": schema.SingleNestedAttribute{
Attributes: map[string]schema.Attribute{
"page": schema.Int64Attribute{
"page": schema.Int32Attribute{
Computed: true,
},
"size": schema.Int64Attribute{
"size": schema.Int32Attribute{
Computed: true,
},
"sort": schema.StringAttribute{
Computed: true,
},
"total_pages": schema.Int64Attribute{
"total_pages": schema.Int32Attribute{
Computed: true,
},
"total_rows": schema.Int64Attribute{
"total_rows": schema.Int32Attribute{
Computed: true,
},
},
@ -100,7 +100,7 @@ func DatabasesDataSourceSchema(ctx context.Context) schema.Schema {
),
},
},
"size": schema.Int64Attribute{
"size": schema.Int32Attribute{
Optional: true,
Computed: true,
Description: "Number of items to be returned on each page.",
@ -131,11 +131,11 @@ func DatabasesDataSourceSchema(ctx context.Context) schema.Schema {
type DatabasesModel struct {
Databases types.List `tfsdk:"databases"`
InstanceId types.String `tfsdk:"instance_id"`
Page types.Int64 `tfsdk:"page"`
Page types.Int32 `tfsdk:"page"`
Pagination PaginationValue `tfsdk:"pagination"`
ProjectId types.String `tfsdk:"project_id"`
Region types.String `tfsdk:"region"`
Size types.Int64 `tfsdk:"size"`
Size types.Int32 `tfsdk:"size"`
Sort types.String `tfsdk:"sort"`
}
@ -174,12 +174,12 @@ func (t DatabasesType) ValueFromObject(ctx context.Context, in basetypes.ObjectV
return nil, diags
}
idVal, ok := idAttribute.(basetypes.Int64Value)
idVal, ok := idAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`id expected to be basetypes.Int64Value, was: %T`, idAttribute))
fmt.Sprintf(`id expected to be basetypes.Int32Value, was: %T`, idAttribute))
}
nameAttribute, ok := attributes["name"]
@ -303,12 +303,12 @@ func NewDatabasesValue(attributeTypes map[string]attr.Type, attributes map[strin
return NewDatabasesValueUnknown(), diags
}
idVal, ok := idAttribute.(basetypes.Int64Value)
idVal, ok := idAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`id expected to be basetypes.Int64Value, was: %T`, idAttribute))
fmt.Sprintf(`id expected to be basetypes.Int32Value, was: %T`, idAttribute))
}
nameAttribute, ok := attributes["name"]
@ -427,7 +427,7 @@ func (t DatabasesType) ValueType(ctx context.Context) attr.Value {
var _ basetypes.ObjectValuable = DatabasesValue{}
type DatabasesValue struct {
Id basetypes.Int64Value `tfsdk:"id"`
Id basetypes.Int32Value `tfsdk:"id"`
Name basetypes.StringValue `tfsdk:"name"`
Owner basetypes.StringValue `tfsdk:"owner"`
state attr.ValueState
@ -439,7 +439,7 @@ func (v DatabasesValue) ToTerraformValue(ctx context.Context) (tftypes.Value, er
var val tftypes.Value
var err error
attrTypes["id"] = basetypes.Int64Type{}.TerraformType(ctx)
attrTypes["id"] = basetypes.Int32Type{}.TerraformType(ctx)
attrTypes["name"] = basetypes.StringType{}.TerraformType(ctx)
attrTypes["owner"] = basetypes.StringType{}.TerraformType(ctx)
@ -503,7 +503,7 @@ func (v DatabasesValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValu
var diags diag.Diagnostics
attributeTypes := map[string]attr.Type{
"id": basetypes.Int64Type{},
"id": basetypes.Int32Type{},
"name": basetypes.StringType{},
"owner": basetypes.StringType{},
}
@ -567,7 +567,7 @@ func (v DatabasesValue) Type(ctx context.Context) attr.Type {
func (v DatabasesValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
return map[string]attr.Type{
"id": basetypes.Int64Type{},
"id": basetypes.Int32Type{},
"name": basetypes.StringType{},
"owner": basetypes.StringType{},
}
@ -608,12 +608,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
return nil, diags
}
pageVal, ok := pageAttribute.(basetypes.Int64Value)
pageVal, ok := pageAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute))
fmt.Sprintf(`page expected to be basetypes.Int32Value, was: %T`, pageAttribute))
}
sizeAttribute, ok := attributes["size"]
@ -626,12 +626,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
return nil, diags
}
sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
sizeVal, ok := sizeAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
fmt.Sprintf(`size expected to be basetypes.Int32Value, was: %T`, sizeAttribute))
}
sortAttribute, ok := attributes["sort"]
@ -662,12 +662,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
return nil, diags
}
totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value)
totalPagesVal, ok := totalPagesAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute))
fmt.Sprintf(`total_pages expected to be basetypes.Int32Value, was: %T`, totalPagesAttribute))
}
totalRowsAttribute, ok := attributes["total_rows"]
@ -680,12 +680,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
return nil, diags
}
totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value)
totalRowsVal, ok := totalRowsAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute))
fmt.Sprintf(`total_rows expected to be basetypes.Int32Value, was: %T`, totalRowsAttribute))
}
if diags.HasError() {
@ -775,12 +775,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
return NewPaginationValueUnknown(), diags
}
pageVal, ok := pageAttribute.(basetypes.Int64Value)
pageVal, ok := pageAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute))
fmt.Sprintf(`page expected to be basetypes.Int32Value, was: %T`, pageAttribute))
}
sizeAttribute, ok := attributes["size"]
@ -793,12 +793,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
return NewPaginationValueUnknown(), diags
}
sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
sizeVal, ok := sizeAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
fmt.Sprintf(`size expected to be basetypes.Int32Value, was: %T`, sizeAttribute))
}
sortAttribute, ok := attributes["sort"]
@ -829,12 +829,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
return NewPaginationValueUnknown(), diags
}
totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value)
totalPagesVal, ok := totalPagesAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute))
fmt.Sprintf(`total_pages expected to be basetypes.Int32Value, was: %T`, totalPagesAttribute))
}
totalRowsAttribute, ok := attributes["total_rows"]
@ -847,12 +847,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
return NewPaginationValueUnknown(), diags
}
totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value)
totalRowsVal, ok := totalRowsAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute))
fmt.Sprintf(`total_rows expected to be basetypes.Int32Value, was: %T`, totalRowsAttribute))
}
if diags.HasError() {
@ -937,11 +937,11 @@ func (t PaginationType) ValueType(ctx context.Context) attr.Value {
var _ basetypes.ObjectValuable = PaginationValue{}
type PaginationValue struct {
Page basetypes.Int64Value `tfsdk:"page"`
Size basetypes.Int64Value `tfsdk:"size"`
Page basetypes.Int32Value `tfsdk:"page"`
Size basetypes.Int32Value `tfsdk:"size"`
Sort basetypes.StringValue `tfsdk:"sort"`
TotalPages basetypes.Int64Value `tfsdk:"total_pages"`
TotalRows basetypes.Int64Value `tfsdk:"total_rows"`
TotalPages basetypes.Int32Value `tfsdk:"total_pages"`
TotalRows basetypes.Int32Value `tfsdk:"total_rows"`
state attr.ValueState
}
@ -951,11 +951,11 @@ func (v PaginationValue) ToTerraformValue(ctx context.Context) (tftypes.Value, e
var val tftypes.Value
var err error
attrTypes["page"] = basetypes.Int64Type{}.TerraformType(ctx)
attrTypes["size"] = basetypes.Int64Type{}.TerraformType(ctx)
attrTypes["page"] = basetypes.Int32Type{}.TerraformType(ctx)
attrTypes["size"] = basetypes.Int32Type{}.TerraformType(ctx)
attrTypes["sort"] = basetypes.StringType{}.TerraformType(ctx)
attrTypes["total_pages"] = basetypes.Int64Type{}.TerraformType(ctx)
attrTypes["total_rows"] = basetypes.Int64Type{}.TerraformType(ctx)
attrTypes["total_pages"] = basetypes.Int32Type{}.TerraformType(ctx)
attrTypes["total_rows"] = basetypes.Int32Type{}.TerraformType(ctx)
objectType := tftypes.Object{AttributeTypes: attrTypes}
@ -1033,11 +1033,11 @@ func (v PaginationValue) ToObjectValue(ctx context.Context) (basetypes.ObjectVal
var diags diag.Diagnostics
attributeTypes := map[string]attr.Type{
"page": basetypes.Int64Type{},
"size": basetypes.Int64Type{},
"page": basetypes.Int32Type{},
"size": basetypes.Int32Type{},
"sort": basetypes.StringType{},
"total_pages": basetypes.Int64Type{},
"total_rows": basetypes.Int64Type{},
"total_pages": basetypes.Int32Type{},
"total_rows": basetypes.Int32Type{},
}
if v.IsNull() {
@ -1109,10 +1109,10 @@ func (v PaginationValue) Type(ctx context.Context) attr.Type {
func (v PaginationValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
return map[string]attr.Type{
"page": basetypes.Int64Type{},
"size": basetypes.Int64Type{},
"page": basetypes.Int32Type{},
"size": basetypes.Int32Type{},
"sort": basetypes.StringType{},
"total_pages": basetypes.Int64Type{},
"total_rows": basetypes.Int64Type{},
"total_pages": basetypes.Int32Type{},
"total_rows": basetypes.Int32Type{},
}
}

View file

@ -5,7 +5,7 @@ import (
"fmt"
"strings"
postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
"github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
)
// databaseClientReader represents the contract to listing databases from postgresflex.APIClient.
@ -15,7 +15,7 @@ type databaseClientReader interface {
projectId string,
region string,
instanceId string,
) postgresflex.ApiListDatabasesRequestRequest
) v3alpha1api.ApiListDatabasesRequestRequest
}
// getDatabaseById gets a database by its ID.
@ -23,10 +23,10 @@ func getDatabaseById(
ctx context.Context,
client databaseClientReader,
projectId, region, instanceId string,
databaseId int64,
) (*postgresflex.ListDatabase, error) {
filter := func(db postgresflex.ListDatabase) bool {
return db.Id != nil && *db.Id == databaseId
databaseId int32,
) (*v3alpha1api.ListDatabase, error) {
filter := func(db v3alpha1api.ListDatabase) bool {
return db.Id == databaseId
}
return getDatabase(ctx, client, projectId, region, instanceId, filter)
}
@ -36,9 +36,9 @@ func getDatabaseByName(
ctx context.Context,
client databaseClientReader,
projectId, region, instanceId, databaseName string,
) (*postgresflex.ListDatabase, error) {
filter := func(db postgresflex.ListDatabase) bool {
return db.Name != nil && *db.Name == databaseName
) (*v3alpha1api.ListDatabase, error) {
filter := func(db v3alpha1api.ListDatabase) bool {
return db.Name == databaseName
}
return getDatabase(ctx, client, projectId, region, instanceId, filter)
}
@ -49,8 +49,8 @@ func getDatabase(
ctx context.Context,
client databaseClientReader,
projectId, region, instanceId string,
filter func(db postgresflex.ListDatabase) bool,
) (*postgresflex.ListDatabase, error) {
filter func(db v3alpha1api.ListDatabase) bool,
) (*v3alpha1api.ListDatabase, error) {
if projectId == "" || region == "" || instanceId == "" {
return nil, fmt.Errorf("all parameters (project, region, instance) are required")
}
@ -59,18 +59,18 @@ func getDatabase(
for page := int32(1); ; page++ {
res, err := client.ListDatabasesRequest(ctx, projectId, region, instanceId).
Page(page).Size(pageSize).Sort(postgresflex.DATABASESORT_DATABASE_ID_ASC).Execute()
Page(page).Size(pageSize).Sort(v3alpha1api.DATABASESORT_DATABASE_ID_ASC).Execute()
if err != nil {
return nil, fmt.Errorf("requesting database list (page %d): %w", page, err)
}
// If the API returns no databases, we have reached the end of the list.
if res.Databases == nil || len(*res.Databases) == 0 {
if res.Databases == nil || len(res.Databases) == 0 {
break
}
// Iterate over databases to find a match
for _, db := range *res.Databases {
for _, db := range res.Databases {
if filter(db) {
foundDb := db
return &foundDb, nil
@ -82,10 +82,6 @@ func getDatabase(
}
// cleanString removes leading and trailing quotes which are sometimes returned by the API.
func cleanString(s *string) *string {
if s == nil {
return nil
}
res := strings.Trim(*s, "\"")
return &res
func cleanString(s string) string {
return strings.Trim(s, "\"")
}

View file

@ -7,7 +7,7 @@ import (
"github.com/google/go-cmp/cmp"
"github.com/stackitcloud/stackit-sdk-go/core/utils"
postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
"github.com/stackitcloud/stackit-sdk-go/services/postgresflex"
)
type mockRequest struct {
@ -37,28 +37,28 @@ func (m *mockDBClient) ListDatabasesRequest(
}
func TestGetDatabase(t *testing.T) {
mockResp := func(page int64) (*postgresflex.ListDatabasesResponse, error) {
mockResp := func(page int32) (*postgresflex.ListDatabasesResponse, error) {
if page == 1 {
return &postgresflex.ListDatabasesResponse{
Databases: &[]postgresflex.ListDatabase{
{Id: utils.Ptr(int64(1)), Name: utils.Ptr("first")},
{Id: utils.Ptr(int64(2)), Name: utils.Ptr("second")},
{Id: utils.Ptr(int32(1)), Name: utils.Ptr("first")},
{Id: utils.Ptr(int32(2)), Name: utils.Ptr("second")},
},
Pagination: &postgresflex.Pagination{
Page: utils.Ptr(int64(1)),
TotalPages: utils.Ptr(int64(2)),
Size: utils.Ptr(int64(3)),
Page: utils.Ptr(int32(1)),
TotalPages: utils.Ptr(int32(2)),
Size: utils.Ptr(int32(3)),
},
}, nil
}
if page == 2 {
return &postgresflex.ListDatabasesResponse{
Databases: &[]postgresflex.ListDatabase{{Id: utils.Ptr(int64(3)), Name: utils.Ptr("three")}},
Databases: &[]postgresflex.ListDatabase{{Id: utils.Ptr(int32(3)), Name: utils.Ptr("three")}},
Pagination: &postgresflex.Pagination{
Page: utils.Ptr(int64(2)),
TotalPages: utils.Ptr(int64(2)),
Size: utils.Ptr(int64(3)),
Page: utils.Ptr(int32(2)),
TotalPages: utils.Ptr(int32(2)),
Size: utils.Ptr(int32(3)),
},
}, nil
}
@ -66,9 +66,9 @@ func TestGetDatabase(t *testing.T) {
return &postgresflex.ListDatabasesResponse{
Databases: &[]postgresflex.ListDatabase{},
Pagination: &postgresflex.Pagination{
Page: utils.Ptr(int64(3)),
TotalPages: utils.Ptr(int64(2)),
Size: utils.Ptr(int64(3)),
Page: utils.Ptr(int32(3)),
TotalPages: utils.Ptr(int32(2)),
Size: utils.Ptr(int32(3)),
},
}, nil
}
@ -80,7 +80,7 @@ func TestGetDatabase(t *testing.T) {
instanceId string
wantErr bool
wantDbName string
wantDbId int64
wantDbId int32
}{
{
description: "Success - Found by name on first page",
@ -133,7 +133,7 @@ func TestGetDatabase(t *testing.T) {
for _, tt := range tests {
t.Run(
tt.description, func(t *testing.T) {
var currentPage int64
var currentPage int32
client := &mockDBClient{
executeRequest: func() postgresflex.ApiListDatabasesRequestRequest {
return &mockRequest{

View file

@ -2,43 +2,42 @@ package postgresflexalpha
import (
"fmt"
"strconv"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
)
// mapFields maps fields from a ListDatabase API response to a resourceModel for the data source.
func mapFields(
source *postgresflexalpha.ListDatabase,
source *v3alpha1api.ListDatabase,
model *dataSourceModel,
region string,
) error {
if source == nil {
return fmt.Errorf("response is nil")
}
if source.Id == nil || *source.Id == 0 {
if source.Id == 0 {
return fmt.Errorf("id not present")
}
if model == nil {
return fmt.Errorf("model given is nil")
}
var databaseId int64
if model.DatabaseId.ValueInt64() != 0 {
databaseId = model.DatabaseId.ValueInt64()
} else if source.Id != nil {
databaseId = *source.Id
var databaseId int32
if model.DatabaseId.ValueInt32() != 0 {
databaseId = model.DatabaseId.ValueInt32()
} else if source.Id != 0 {
databaseId = source.Id
} else {
return fmt.Errorf("database id not present")
}
model.Id = types.Int64Value(databaseId)
model.DatabaseId = types.Int64Value(databaseId)
model.Id = types.Int32Value(databaseId)
model.DatabaseId = types.Int32Value(databaseId)
model.Name = types.StringValue(source.GetName())
model.Owner = types.StringPointerValue(cleanString(source.Owner))
model.Owner = types.StringValue(cleanString(source.Owner))
model.Region = types.StringValue(region)
model.ProjectId = types.StringValue(model.ProjectId.ValueString())
model.InstanceId = types.StringValue(model.InstanceId.ValueString())
@ -46,48 +45,48 @@ func mapFields(
model.ProjectId.ValueString(),
region,
model.InstanceId.ValueString(),
strconv.FormatInt(databaseId, 10),
strconv.Itoa(int(databaseId)),
)
return nil
}
// mapResourceFields maps fields from a GetDatabase API response to a resourceModel for the resource.
func mapResourceFields(source *postgresflexalpha.GetDatabaseResponse, model *resourceModel) error {
func mapResourceFields(source *v3alpha1api.GetDatabaseResponse, model *resourceModel) error {
if source == nil {
return fmt.Errorf("response is nil")
}
if source.Id == nil || *source.Id == 0 {
if source.Id == 0 {
return fmt.Errorf("id not present")
}
if model == nil {
return fmt.Errorf("model input is nil")
}
var databaseId int64
if model.Id.ValueInt64() != 0 {
databaseId = model.Id.ValueInt64()
} else if source.Id != nil {
databaseId = *source.Id
var databaseId int32
if model.Id.ValueInt32() != 0 {
databaseId = model.Id.ValueInt32()
} else if source.Id != 0 {
databaseId = source.Id
} else {
return fmt.Errorf("database id not present")
}
model.Id = types.Int64Value(databaseId)
model.DatabaseId = types.Int64Value(databaseId)
model.Id = types.Int32Value(databaseId)
model.DatabaseId = types.Int32Value(databaseId)
model.Name = types.StringValue(source.GetName())
model.Owner = types.StringPointerValue(cleanString(source.Owner))
model.Owner = types.StringValue(cleanString(source.Owner))
return nil
}
// toCreatePayload converts the resource model to an API create payload.
func toCreatePayload(model *resourceModel) (*postgresflexalpha.CreateDatabaseRequestPayload, error) {
func toCreatePayload(model *resourceModel) (*v3alpha1api.CreateDatabaseRequestPayload, error) {
if model == nil {
return nil, fmt.Errorf("nil model")
}
return &postgresflexalpha.CreateDatabaseRequestPayload{
Name: model.Name.ValueStringPointer(),
return &v3alpha1api.CreateDatabaseRequestPayload{
Name: model.Name.ValueString(),
Owner: model.Owner.ValueStringPointer(),
}, nil
}

View file

@ -7,7 +7,7 @@ import (
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/stackitcloud/stackit-sdk-go/core/utils"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
postgresflexalpha "github.com/stackitcloud/stackit-sdk-go/services/postgresflex"
datasource "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/database/datasources_gen"
)
@ -31,7 +31,7 @@ func TestMapFields(t *testing.T) {
name: "should map fields correctly",
given: given{
source: &postgresflexalpha.ListDatabase{
Id: utils.Ptr(int64(1)),
Id: utils.Ptr(int32(1)),
Name: utils.Ptr("my-db"),
Owner: utils.Ptr("\"my-owner\""),
},
@ -46,11 +46,11 @@ func TestMapFields(t *testing.T) {
expected: expected{
model: &dataSourceModel{
DatabaseModel: datasource.DatabaseModel{
Id: types.Int64Value(1),
Id: types.Int32Value(1),
Name: types.StringValue("my-db"),
Owner: types.StringValue("my-owner"),
Region: types.StringValue("eu01"),
DatabaseId: types.Int64Value(1),
DatabaseId: types.Int32Value(1),
InstanceId: types.StringValue("my-instance"),
ProjectId: types.StringValue("my-project"),
},
@ -62,12 +62,12 @@ func TestMapFields(t *testing.T) {
name: "should preserve existing model ID",
given: given{
source: &postgresflexalpha.ListDatabase{
Id: utils.Ptr(int64(1)),
Id: utils.Ptr(int32(1)),
Name: utils.Ptr("my-db"),
},
model: &dataSourceModel{
DatabaseModel: datasource.DatabaseModel{
Id: types.Int64Value(1),
Id: types.Int32Value(1),
ProjectId: types.StringValue("my-project"),
InstanceId: types.StringValue("my-instance"),
},
@ -77,9 +77,9 @@ func TestMapFields(t *testing.T) {
expected: expected{
model: &dataSourceModel{
DatabaseModel: datasource.DatabaseModel{
Id: types.Int64Value(1),
Id: types.Int32Value(1),
Name: types.StringValue("my-db"),
Owner: types.StringNull(), DatabaseId: types.Int64Value(1),
Owner: types.StringNull(), DatabaseId: types.Int32Value(1),
Region: types.StringValue("eu01"),
InstanceId: types.StringValue("my-instance"),
ProjectId: types.StringValue("my-project"),
@ -107,7 +107,7 @@ func TestMapFields(t *testing.T) {
{
name: "should fail on nil model",
given: given{
source: &postgresflexalpha.ListDatabase{Id: utils.Ptr(int64(1))},
source: &postgresflexalpha.ListDatabase{Id: utils.Ptr(int32(1))},
model: nil,
},
expected: expected{err: true},
@ -150,7 +150,7 @@ func TestMapResourceFields(t *testing.T) {
name: "should map fields correctly",
given: given{
source: &postgresflexalpha.GetDatabaseResponse{
Id: utils.Ptr(int64(1)),
Id: utils.Ptr(int32(1)),
Name: utils.Ptr("my-db"),
Owner: utils.Ptr("my-owner"),
},
@ -158,10 +158,10 @@ func TestMapResourceFields(t *testing.T) {
},
expected: expected{
model: &resourceModel{
Id: types.Int64Value(1),
Id: types.Int32Value(1),
Name: types.StringValue("my-db"),
Owner: types.StringValue("my-owner"),
DatabaseId: types.Int64Value(1),
DatabaseId: types.Int32Value(1),
},
},
},

View file

@ -14,14 +14,14 @@ import (
"github.com/hashicorp/terraform-plugin-framework/resource/identityschema"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-log/tflog"
"github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
postgresflexalpha2 "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/database/resources_gen"
postgresflexalphaResGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/database/resources_gen"
postgresflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/utils"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
postgresflexalpha3 "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/wait/postgresflexalpha"
postgresflexalphaWait "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/wait/postgresflexalpha"
)
var (
@ -43,19 +43,19 @@ func NewDatabaseResource() resource.Resource {
}
// resourceModel describes the resource data model.
type resourceModel = postgresflexalpha2.DatabaseModel
type resourceModel = postgresflexalphaResGen.DatabaseModel
// DatabaseResourceIdentityModel describes the resource's identity attributes.
type DatabaseResourceIdentityModel struct {
ProjectID types.String `tfsdk:"project_id"`
Region types.String `tfsdk:"region"`
InstanceID types.String `tfsdk:"instance_id"`
DatabaseID types.Int64 `tfsdk:"database_id"`
DatabaseID types.Int32 `tfsdk:"database_id"`
}
// databaseResource is the resource implementation.
type databaseResource struct {
client *postgresflexalpha.APIClient
client *v3alpha1api.APIClient
providerData core.ProviderData
}
@ -122,7 +122,7 @@ var modifiersFileByte []byte
// Schema defines the schema for the resource.
func (r *databaseResource) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) {
s := postgresflexalpha2.DatabaseResourceSchema(ctx)
s := postgresflexalphaResGen.DatabaseResourceSchema(ctx)
fields, err := utils.ReadModifiersConfig(modifiersFileByte)
if err != nil {
@ -155,7 +155,7 @@ func (r *databaseResource) IdentitySchema(
"instance_id": identityschema.StringAttribute{
RequiredForImport: true,
},
"database_id": identityschema.Int64Attribute{
"database_id": identityschema.Int32Attribute{
RequiredForImport: true,
},
},
@ -198,7 +198,7 @@ func (r *databaseResource) Create(
return
}
// Create new database
databaseResp, err := r.client.CreateDatabaseRequest(
databaseResp, err := r.client.DefaultAPI.CreateDatabaseRequest(
ctx,
projectId,
region,
@ -209,16 +209,17 @@ func (r *databaseResource) Create(
return
}
if databaseResp == nil || databaseResp.Id == nil {
dbID, ok := databaseResp.GetIdOk()
if !ok {
core.LogAndAddError(
ctx,
&resp.Diagnostics,
funcErrorSummary,
"API didn't return database Id. A database might have been created",
"API didn't return database Id. A database might although have been created",
)
return
}
databaseId := *databaseResp.Id
databaseId := *dbID
ctx = tflog.SetField(ctx, "database_id", databaseId)
ctx = core.LogResponse(ctx)
@ -227,14 +228,14 @@ func (r *databaseResource) Create(
ProjectID: types.StringValue(projectId),
Region: types.StringValue(region),
InstanceID: types.StringValue(instanceId),
DatabaseID: types.Int64Value(databaseId),
DatabaseID: types.Int32Value(int32(databaseId)),
}
resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
if resp.Diagnostics.HasError() {
return
}
database, err := postgresflexalpha3.GetDatabaseByIdWaitHandler(ctx, r.client, projectId, instanceId, region, databaseId).
database, err := postgresflexalphaWait.GetDatabaseByIdWaitHandler(ctx, r.client.DefaultAPI, projectId, instanceId, region, databaseId).
SetTimeout(15 * time.Minute).
SetSleepBeforeWait(15 * time.Second).
WaitWithContext(ctx)
@ -286,14 +287,14 @@ func (r *databaseResource) Read(
projectId := model.ProjectId.ValueString()
instanceId := model.InstanceId.ValueString()
region := model.Region.ValueString()
databaseId := model.DatabaseId.ValueInt64()
databaseId := model.DatabaseId.ValueInt32()
ctx = tflog.SetField(ctx, "project_id", projectId)
ctx = tflog.SetField(ctx, "instance_id", instanceId)
ctx = tflog.SetField(ctx, "region", region)
ctx = tflog.SetField(ctx, "database_id", databaseId)
databaseResp, err := postgresflexalpha3.GetDatabaseByIdWaitHandler(ctx, r.client, projectId, instanceId, region, databaseId).
databaseResp, err := postgresflexalphaWait.GetDatabaseByIdWaitHandler(ctx, r.client.DefaultAPI, projectId, instanceId, region, databaseId).
SetTimeout(15 * time.Minute).
SetSleepBeforeWait(15 * time.Second).
WaitWithContext(ctx)
@ -327,7 +328,7 @@ func (r *databaseResource) Read(
ProjectID: types.StringValue(projectId),
Region: types.StringValue(region),
InstanceID: types.StringValue(instanceId),
DatabaseID: types.Int64Value(databaseId),
DatabaseID: types.Int32Value(databaseId),
}
resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
if resp.Diagnostics.HasError() {
@ -361,13 +362,7 @@ func (r *databaseResource) Update(
projectId := model.ProjectId.ValueString()
instanceId := model.InstanceId.ValueString()
region := model.Region.ValueString()
databaseId64 := model.DatabaseId.ValueInt64()
if databaseId64 > math.MaxInt32 {
core.LogAndAddError(ctx, &resp.Diagnostics, "Error in type conversion", "int value too large (databaseId)")
return
}
databaseId := int32(databaseId64) // nolint:gosec // check is performed above
databaseId := model.DatabaseId.ValueInt32()
ctx = tflog.SetField(ctx, "project_id", projectId)
ctx = tflog.SetField(ctx, "instance_id", instanceId)
@ -383,7 +378,7 @@ func (r *databaseResource) Update(
}
modified := false
var payload postgresflexalpha.UpdateDatabasePartiallyRequestPayload
var payload v3alpha1api.UpdateDatabasePartiallyRequestPayload
if stateModel.Name != model.Name {
payload.Name = model.Name.ValueStringPointer()
modified = true
@ -400,7 +395,7 @@ func (r *databaseResource) Update(
}
// Update existing database
err := r.client.UpdateDatabasePartiallyRequest(
err := r.client.DefaultAPI.UpdateDatabasePartiallyRequest(
ctx,
projectId,
region,
@ -414,7 +409,7 @@ func (r *databaseResource) Update(
ctx = core.LogResponse(ctx)
databaseResp, err := postgresflexalpha3.GetDatabaseByIdWaitHandler(ctx, r.client, projectId, instanceId, region, databaseId64).
databaseResp, err := postgresflexalphaWait.GetDatabaseByIdWaitHandler(ctx, r.client.DefaultAPI, projectId, instanceId, region, databaseId).
SetTimeout(15 * time.Minute).
SetSleepBeforeWait(15 * time.Second).
WaitWithContext(ctx)
@ -442,7 +437,7 @@ func (r *databaseResource) Update(
ProjectID: types.StringValue(projectId),
Region: types.StringValue(region),
InstanceID: types.StringValue(instanceId),
DatabaseID: types.Int64Value(databaseId64),
DatabaseID: types.Int32Value(databaseId),
}
resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
if resp.Diagnostics.HasError() {
@ -500,7 +495,7 @@ func (r *databaseResource) Delete(
ctx = tflog.SetField(ctx, "database_id", databaseId)
// Delete existing record set
err := r.client.DeleteDatabaseRequestExecute(ctx, projectId, region, instanceId, databaseId)
err := r.client.DefaultAPI.DeleteDatabaseRequest(ctx, projectId, region, instanceId, databaseId).Execute()
if err != nil {
core.LogAndAddError(ctx, &resp.Diagnostics, "Error deleting database", fmt.Sprintf("Calling API: %v", err))
}
@ -572,7 +567,7 @@ func (r *databaseResource) ImportState(
projectId := identityData.ProjectID.ValueString()
region := identityData.Region.ValueString()
instanceId := identityData.InstanceID.ValueString()
databaseId := identityData.DatabaseID.ValueInt64()
databaseId := identityData.DatabaseID.ValueInt32()
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), projectId)...)
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), region)...)
@ -586,14 +581,14 @@ func (r *databaseResource) ImportState(
func (r *databaseResource) extractIdentityData(
model resourceModel,
identity DatabaseResourceIdentityModel,
) (projectId, region, instanceId string, databaseId int64, err error) {
) (projectId, region, instanceId string, databaseId int32, err error) {
if !model.DatabaseId.IsNull() && !model.DatabaseId.IsUnknown() {
databaseId = model.DatabaseId.ValueInt64()
databaseId = model.DatabaseId.ValueInt32()
} else {
if identity.DatabaseID.IsNull() || identity.DatabaseID.IsUnknown() {
return "", "", "", 0, fmt.Errorf("database_id not found in config")
}
databaseId = identity.DatabaseID.ValueInt64()
databaseId = identity.DatabaseID.ValueInt32()
}
if !model.ProjectId.IsNull() && !model.ProjectId.IsUnknown() {

View file

@ -14,13 +14,13 @@ import (
func DatabaseResourceSchema(ctx context.Context) schema.Schema {
return schema.Schema{
Attributes: map[string]schema.Attribute{
"database_id": schema.Int64Attribute{
"database_id": schema.Int32Attribute{
Optional: true,
Computed: true,
Description: "The ID of the database.",
MarkdownDescription: "The ID of the database.",
},
"id": schema.Int64Attribute{
"id": schema.Int32Attribute{
Computed: true,
Description: "The id of the database.",
MarkdownDescription: "The id of the database.",
@ -64,8 +64,8 @@ func DatabaseResourceSchema(ctx context.Context) schema.Schema {
}
type DatabaseModel struct {
DatabaseId types.Int64 `tfsdk:"database_id"`
Id types.Int64 `tfsdk:"id"`
DatabaseId types.Int32 `tfsdk:"database_id"`
Id types.Int32 `tfsdk:"id"`
InstanceId types.String `tfsdk:"instance_id"`
Name types.String `tfsdk:"name"`
Owner types.String `tfsdk:"owner"`

View file

@ -8,8 +8,8 @@ import (
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
"github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
postgresflexalphaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/flavors/datasources_gen"
postgresflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/utils"
@ -30,13 +30,13 @@ type FlavorModel struct {
ProjectId types.String `tfsdk:"project_id"`
Region types.String `tfsdk:"region"`
StorageClass types.String `tfsdk:"storage_class"`
Cpu types.Int64 `tfsdk:"cpu"`
Cpu types.Int32 `tfsdk:"cpu"`
Description types.String `tfsdk:"description"`
Id types.String `tfsdk:"id"`
FlavorId types.String `tfsdk:"flavor_id"`
MaxGb types.Int64 `tfsdk:"max_gb"`
Memory types.Int64 `tfsdk:"ram"`
MinGb types.Int64 `tfsdk:"min_gb"`
MaxGb types.Int32 `tfsdk:"max_gb"`
Memory types.Int32 `tfsdk:"ram"`
MinGb types.Int32 `tfsdk:"min_gb"`
NodeType types.String `tfsdk:"node_type"`
StorageClasses types.List `tfsdk:"storage_classes"`
}
@ -48,7 +48,7 @@ func NewFlavorDataSource() datasource.DataSource {
// flavorDataSource is the data source implementation.
type flavorDataSource struct {
client *postgresflexalpha.APIClient
client *v3alpha1api.APIClient
providerData core.ProviderData
}
@ -86,12 +86,12 @@ func (r *flavorDataSource) Schema(ctx context.Context, _ datasource.SchemaReques
Description: "The flavor description.",
MarkdownDescription: "The flavor description.",
},
"cpu": schema.Int64Attribute{
"cpu": schema.Int32Attribute{
Required: true,
Description: "The cpu count of the instance.",
MarkdownDescription: "The cpu count of the instance.",
},
"ram": schema.Int64Attribute{
"ram": schema.Int32Attribute{
Required: true,
Description: "The memory of the instance in Gibibyte.",
MarkdownDescription: "The memory of the instance in Gibibyte.",
@ -116,12 +116,12 @@ func (r *flavorDataSource) Schema(ctx context.Context, _ datasource.SchemaReques
Description: "The flavor id of the instance flavor.",
MarkdownDescription: "The flavor id of the instance flavor.",
},
"max_gb": schema.Int64Attribute{
"max_gb": schema.Int32Attribute{
Computed: true,
Description: "maximum storage which can be ordered for the flavor in Gigabyte.",
MarkdownDescription: "maximum storage which can be ordered for the flavor in Gigabyte.",
},
"min_gb": schema.Int64Attribute{
"min_gb": schema.Int32Attribute{
Computed: true,
Description: "minimum storage which is required to order in Gigabyte.",
MarkdownDescription: "minimum storage which is required to order in Gigabyte.",
@ -138,10 +138,10 @@ func (r *flavorDataSource) Schema(ctx context.Context, _ datasource.SchemaReques
"class": schema.StringAttribute{
Computed: true,
},
"max_io_per_sec": schema.Int64Attribute{
"max_io_per_sec": schema.Int32Attribute{
Computed: true,
},
"max_through_in_mb": schema.Int64Attribute{
"max_through_in_mb": schema.Int32Attribute{
Computed: true,
},
},
@ -171,25 +171,25 @@ func (r *flavorDataSource) Read(ctx context.Context, req datasource.ReadRequest,
ctx = tflog.SetField(ctx, "project_id", projectId)
ctx = tflog.SetField(ctx, "region", region)
flavors, err := getAllFlavors(ctx, r.client, projectId, region)
flavors, err := getAllFlavors(ctx, r.client.DefaultAPI, projectId, region)
if err != nil {
core.LogAndAddError(ctx, &resp.Diagnostics, "Error reading flavors", fmt.Sprintf("getAllFlavors: %v", err))
return
}
var foundFlavors []postgresflexalpha.ListFlavors
var foundFlavors []v3alpha1api.ListFlavors
for _, flavor := range flavors {
if model.Cpu.ValueInt64() != *flavor.Cpu {
if model.Cpu.ValueInt32() != flavor.Cpu {
continue
}
if model.Memory.ValueInt64() != *flavor.Memory {
if model.Memory.ValueInt32() != flavor.Memory {
continue
}
if model.NodeType.ValueString() != *flavor.NodeType {
if model.NodeType.ValueString() != flavor.NodeType {
continue
}
for _, sc := range *flavor.StorageClasses {
if model.StorageClass.ValueString() != *sc.Class {
for _, sc := range flavor.StorageClasses {
if model.StorageClass.ValueString() != sc.Class {
continue
}
foundFlavors = append(foundFlavors, flavor)
@ -205,11 +205,11 @@ func (r *flavorDataSource) Read(ctx context.Context, req datasource.ReadRequest,
}
f := foundFlavors[0]
model.Description = types.StringValue(*f.Description)
model.Id = utils.BuildInternalTerraformId(model.ProjectId.ValueString(), region, *f.Id)
model.FlavorId = types.StringValue(*f.Id)
model.MaxGb = types.Int64Value(*f.MaxGB)
model.MinGb = types.Int64Value(*f.MinGB)
model.Description = types.StringValue(f.Description)
model.Id = utils.BuildInternalTerraformId(model.ProjectId.ValueString(), region, f.Id)
model.FlavorId = types.StringValue(f.Id)
model.MaxGb = types.Int32Value(f.MaxGB)
model.MinGb = types.Int32Value(f.MinGB)
if f.StorageClasses == nil {
model.StorageClasses = types.ListNull(postgresflexalphaGen.StorageClassesType{
@ -219,15 +219,15 @@ func (r *flavorDataSource) Read(ctx context.Context, req datasource.ReadRequest,
})
} else {
var scList []attr.Value
for _, sc := range *f.StorageClasses {
for _, sc := range f.StorageClasses {
scList = append(
scList,
postgresflexalphaGen.NewStorageClassesValueMust(
postgresflexalphaGen.StorageClassesValue{}.AttributeTypes(ctx),
map[string]attr.Value{
"class": types.StringValue(*sc.Class),
"max_io_per_sec": types.Int64Value(*sc.MaxIoPerSec),
"max_through_in_mb": types.Int64Value(*sc.MaxThroughInMb),
"class": types.StringValue(sc.Class),
"max_io_per_sec": types.Int32Value(sc.MaxIoPerSec),
"max_through_in_mb": types.Int32Value(sc.MaxThroughInMb),
},
),
)

View file

@ -23,7 +23,7 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
"flavors": schema.ListNestedAttribute{
NestedObject: schema.NestedAttributeObject{
Attributes: map[string]schema.Attribute{
"cpu": schema.Int64Attribute{
"cpu": schema.Int32Attribute{
Computed: true,
Description: "The cpu count of the instance.",
MarkdownDescription: "The cpu count of the instance.",
@ -38,17 +38,17 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
Description: "The id of the instance flavor.",
MarkdownDescription: "The id of the instance flavor.",
},
"max_gb": schema.Int64Attribute{
"max_gb": schema.Int32Attribute{
Computed: true,
Description: "maximum storage which can be ordered for the flavor in Gigabyte.",
MarkdownDescription: "maximum storage which can be ordered for the flavor in Gigabyte.",
},
"memory": schema.Int64Attribute{
"memory": schema.Int32Attribute{
Computed: true,
Description: "The memory of the instance in Gibibyte.",
MarkdownDescription: "The memory of the instance in Gibibyte.",
},
"min_gb": schema.Int64Attribute{
"min_gb": schema.Int32Attribute{
Computed: true,
Description: "minimum storage which is required to order in Gigabyte.",
MarkdownDescription: "minimum storage which is required to order in Gigabyte.",
@ -64,10 +64,10 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
"class": schema.StringAttribute{
Computed: true,
},
"max_io_per_sec": schema.Int64Attribute{
"max_io_per_sec": schema.Int32Attribute{
Computed: true,
},
"max_through_in_mb": schema.Int64Attribute{
"max_through_in_mb": schema.Int32Attribute{
Computed: true,
},
},
@ -92,7 +92,7 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
Description: "List of flavors available for the project.",
MarkdownDescription: "List of flavors available for the project.",
},
"page": schema.Int64Attribute{
"page": schema.Int32Attribute{
Optional: true,
Computed: true,
Description: "Number of the page of items list to be returned.",
@ -100,19 +100,19 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
},
"pagination": schema.SingleNestedAttribute{
Attributes: map[string]schema.Attribute{
"page": schema.Int64Attribute{
"page": schema.Int32Attribute{
Computed: true,
},
"size": schema.Int64Attribute{
"size": schema.Int32Attribute{
Computed: true,
},
"sort": schema.StringAttribute{
Computed: true,
},
"total_pages": schema.Int64Attribute{
"total_pages": schema.Int32Attribute{
Computed: true,
},
"total_rows": schema.Int64Attribute{
"total_rows": schema.Int32Attribute{
Computed: true,
},
},
@ -138,7 +138,7 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
),
},
},
"size": schema.Int64Attribute{
"size": schema.Int32Attribute{
Optional: true,
Computed: true,
Description: "Number of items to be returned on each page.",
@ -178,11 +178,11 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
type FlavorsModel struct {
Flavors types.List `tfsdk:"flavors"`
Page types.Int64 `tfsdk:"page"`
Page types.Int32 `tfsdk:"page"`
Pagination PaginationValue `tfsdk:"pagination"`
ProjectId types.String `tfsdk:"project_id"`
Region types.String `tfsdk:"region"`
Size types.Int64 `tfsdk:"size"`
Size types.Int32 `tfsdk:"size"`
Sort types.String `tfsdk:"sort"`
}
@ -221,12 +221,12 @@ func (t FlavorsType) ValueFromObject(ctx context.Context, in basetypes.ObjectVal
return nil, diags
}
cpuVal, ok := cpuAttribute.(basetypes.Int64Value)
cpuVal, ok := cpuAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`cpu expected to be basetypes.Int64Value, was: %T`, cpuAttribute))
fmt.Sprintf(`cpu expected to be basetypes.Int32Value, was: %T`, cpuAttribute))
}
descriptionAttribute, ok := attributes["description"]
@ -275,12 +275,12 @@ func (t FlavorsType) ValueFromObject(ctx context.Context, in basetypes.ObjectVal
return nil, diags
}
maxGbVal, ok := maxGbAttribute.(basetypes.Int64Value)
maxGbVal, ok := maxGbAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`max_gb expected to be basetypes.Int64Value, was: %T`, maxGbAttribute))
fmt.Sprintf(`max_gb expected to be basetypes.Int32Value, was: %T`, maxGbAttribute))
}
memoryAttribute, ok := attributes["memory"]
@ -293,12 +293,12 @@ func (t FlavorsType) ValueFromObject(ctx context.Context, in basetypes.ObjectVal
return nil, diags
}
memoryVal, ok := memoryAttribute.(basetypes.Int64Value)
memoryVal, ok := memoryAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`memory expected to be basetypes.Int64Value, was: %T`, memoryAttribute))
fmt.Sprintf(`memory expected to be basetypes.Int32Value, was: %T`, memoryAttribute))
}
minGbAttribute, ok := attributes["min_gb"]
@ -311,12 +311,12 @@ func (t FlavorsType) ValueFromObject(ctx context.Context, in basetypes.ObjectVal
return nil, diags
}
minGbVal, ok := minGbAttribute.(basetypes.Int64Value)
minGbVal, ok := minGbAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`min_gb expected to be basetypes.Int64Value, was: %T`, minGbAttribute))
fmt.Sprintf(`min_gb expected to be basetypes.Int32Value, was: %T`, minGbAttribute))
}
nodeTypeAttribute, ok := attributes["node_type"]
@ -445,12 +445,12 @@ func NewFlavorsValue(attributeTypes map[string]attr.Type, attributes map[string]
return NewFlavorsValueUnknown(), diags
}
cpuVal, ok := cpuAttribute.(basetypes.Int64Value)
cpuVal, ok := cpuAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`cpu expected to be basetypes.Int64Value, was: %T`, cpuAttribute))
fmt.Sprintf(`cpu expected to be basetypes.Int32Value, was: %T`, cpuAttribute))
}
descriptionAttribute, ok := attributes["description"]
@ -499,12 +499,12 @@ func NewFlavorsValue(attributeTypes map[string]attr.Type, attributes map[string]
return NewFlavorsValueUnknown(), diags
}
maxGbVal, ok := maxGbAttribute.(basetypes.Int64Value)
maxGbVal, ok := maxGbAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`max_gb expected to be basetypes.Int64Value, was: %T`, maxGbAttribute))
fmt.Sprintf(`max_gb expected to be basetypes.Int32Value, was: %T`, maxGbAttribute))
}
memoryAttribute, ok := attributes["memory"]
@ -517,12 +517,12 @@ func NewFlavorsValue(attributeTypes map[string]attr.Type, attributes map[string]
return NewFlavorsValueUnknown(), diags
}
memoryVal, ok := memoryAttribute.(basetypes.Int64Value)
memoryVal, ok := memoryAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`memory expected to be basetypes.Int64Value, was: %T`, memoryAttribute))
fmt.Sprintf(`memory expected to be basetypes.Int32Value, was: %T`, memoryAttribute))
}
minGbAttribute, ok := attributes["min_gb"]
@ -535,12 +535,12 @@ func NewFlavorsValue(attributeTypes map[string]attr.Type, attributes map[string]
return NewFlavorsValueUnknown(), diags
}
minGbVal, ok := minGbAttribute.(basetypes.Int64Value)
minGbVal, ok := minGbAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`min_gb expected to be basetypes.Int64Value, was: %T`, minGbAttribute))
fmt.Sprintf(`min_gb expected to be basetypes.Int32Value, was: %T`, minGbAttribute))
}
nodeTypeAttribute, ok := attributes["node_type"]
@ -664,12 +664,12 @@ func (t FlavorsType) ValueType(ctx context.Context) attr.Value {
var _ basetypes.ObjectValuable = FlavorsValue{}
type FlavorsValue struct {
Cpu basetypes.Int64Value `tfsdk:"cpu"`
Cpu basetypes.Int32Value `tfsdk:"cpu"`
Description basetypes.StringValue `tfsdk:"description"`
Id basetypes.StringValue `tfsdk:"id"`
MaxGb basetypes.Int64Value `tfsdk:"max_gb"`
Memory basetypes.Int64Value `tfsdk:"memory"`
MinGb basetypes.Int64Value `tfsdk:"min_gb"`
MaxGb basetypes.Int32Value `tfsdk:"max_gb"`
Memory basetypes.Int32Value `tfsdk:"memory"`
MinGb basetypes.Int32Value `tfsdk:"min_gb"`
NodeType basetypes.StringValue `tfsdk:"node_type"`
StorageClasses basetypes.ListValue `tfsdk:"storage_classes"`
state attr.ValueState
@ -681,12 +681,12 @@ func (v FlavorsValue) ToTerraformValue(ctx context.Context) (tftypes.Value, erro
var val tftypes.Value
var err error
attrTypes["cpu"] = basetypes.Int64Type{}.TerraformType(ctx)
attrTypes["cpu"] = basetypes.Int32Type{}.TerraformType(ctx)
attrTypes["description"] = basetypes.StringType{}.TerraformType(ctx)
attrTypes["id"] = basetypes.StringType{}.TerraformType(ctx)
attrTypes["max_gb"] = basetypes.Int64Type{}.TerraformType(ctx)
attrTypes["memory"] = basetypes.Int64Type{}.TerraformType(ctx)
attrTypes["min_gb"] = basetypes.Int64Type{}.TerraformType(ctx)
attrTypes["max_gb"] = basetypes.Int32Type{}.TerraformType(ctx)
attrTypes["memory"] = basetypes.Int32Type{}.TerraformType(ctx)
attrTypes["min_gb"] = basetypes.Int32Type{}.TerraformType(ctx)
attrTypes["node_type"] = basetypes.StringType{}.TerraformType(ctx)
attrTypes["storage_classes"] = basetypes.ListType{
ElemType: StorageClassesValue{}.Type(ctx),
@ -821,12 +821,12 @@ func (v FlavorsValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue,
}
attributeTypes := map[string]attr.Type{
"cpu": basetypes.Int64Type{},
"cpu": basetypes.Int32Type{},
"description": basetypes.StringType{},
"id": basetypes.StringType{},
"max_gb": basetypes.Int64Type{},
"memory": basetypes.Int64Type{},
"min_gb": basetypes.Int64Type{},
"max_gb": basetypes.Int32Type{},
"memory": basetypes.Int32Type{},
"min_gb": basetypes.Int32Type{},
"node_type": basetypes.StringType{},
"storage_classes": basetypes.ListType{
ElemType: StorageClassesValue{}.Type(ctx),
@ -917,12 +917,12 @@ func (v FlavorsValue) Type(ctx context.Context) attr.Type {
func (v FlavorsValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
return map[string]attr.Type{
"cpu": basetypes.Int64Type{},
"cpu": basetypes.Int32Type{},
"description": basetypes.StringType{},
"id": basetypes.StringType{},
"max_gb": basetypes.Int64Type{},
"memory": basetypes.Int64Type{},
"min_gb": basetypes.Int64Type{},
"max_gb": basetypes.Int32Type{},
"memory": basetypes.Int32Type{},
"min_gb": basetypes.Int32Type{},
"node_type": basetypes.StringType{},
"storage_classes": basetypes.ListType{
ElemType: StorageClassesValue{}.Type(ctx),
@ -983,12 +983,12 @@ func (t StorageClassesType) ValueFromObject(ctx context.Context, in basetypes.Ob
return nil, diags
}
maxIoPerSecVal, ok := maxIoPerSecAttribute.(basetypes.Int64Value)
maxIoPerSecVal, ok := maxIoPerSecAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`max_io_per_sec expected to be basetypes.Int64Value, was: %T`, maxIoPerSecAttribute))
fmt.Sprintf(`max_io_per_sec expected to be basetypes.Int32Value, was: %T`, maxIoPerSecAttribute))
}
maxThroughInMbAttribute, ok := attributes["max_through_in_mb"]
@ -1001,12 +1001,12 @@ func (t StorageClassesType) ValueFromObject(ctx context.Context, in basetypes.Ob
return nil, diags
}
maxThroughInMbVal, ok := maxThroughInMbAttribute.(basetypes.Int64Value)
maxThroughInMbVal, ok := maxThroughInMbAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`max_through_in_mb expected to be basetypes.Int64Value, was: %T`, maxThroughInMbAttribute))
fmt.Sprintf(`max_through_in_mb expected to be basetypes.Int32Value, was: %T`, maxThroughInMbAttribute))
}
if diags.HasError() {
@ -1112,12 +1112,12 @@ func NewStorageClassesValue(attributeTypes map[string]attr.Type, attributes map[
return NewStorageClassesValueUnknown(), diags
}
maxIoPerSecVal, ok := maxIoPerSecAttribute.(basetypes.Int64Value)
maxIoPerSecVal, ok := maxIoPerSecAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`max_io_per_sec expected to be basetypes.Int64Value, was: %T`, maxIoPerSecAttribute))
fmt.Sprintf(`max_io_per_sec expected to be basetypes.Int32Value, was: %T`, maxIoPerSecAttribute))
}
maxThroughInMbAttribute, ok := attributes["max_through_in_mb"]
@ -1130,12 +1130,12 @@ func NewStorageClassesValue(attributeTypes map[string]attr.Type, attributes map[
return NewStorageClassesValueUnknown(), diags
}
maxThroughInMbVal, ok := maxThroughInMbAttribute.(basetypes.Int64Value)
maxThroughInMbVal, ok := maxThroughInMbAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`max_through_in_mb expected to be basetypes.Int64Value, was: %T`, maxThroughInMbAttribute))
fmt.Sprintf(`max_through_in_mb expected to be basetypes.Int32Value, was: %T`, maxThroughInMbAttribute))
}
if diags.HasError() {
@ -1219,8 +1219,8 @@ var _ basetypes.ObjectValuable = StorageClassesValue{}
type StorageClassesValue struct {
Class basetypes.StringValue `tfsdk:"class"`
MaxIoPerSec basetypes.Int64Value `tfsdk:"max_io_per_sec"`
MaxThroughInMb basetypes.Int64Value `tfsdk:"max_through_in_mb"`
MaxIoPerSec basetypes.Int32Value `tfsdk:"max_io_per_sec"`
MaxThroughInMb basetypes.Int32Value `tfsdk:"max_through_in_mb"`
state attr.ValueState
}
@ -1231,8 +1231,8 @@ func (v StorageClassesValue) ToTerraformValue(ctx context.Context) (tftypes.Valu
var err error
attrTypes["class"] = basetypes.StringType{}.TerraformType(ctx)
attrTypes["max_io_per_sec"] = basetypes.Int64Type{}.TerraformType(ctx)
attrTypes["max_through_in_mb"] = basetypes.Int64Type{}.TerraformType(ctx)
attrTypes["max_io_per_sec"] = basetypes.Int32Type{}.TerraformType(ctx)
attrTypes["max_through_in_mb"] = basetypes.Int32Type{}.TerraformType(ctx)
objectType := tftypes.Object{AttributeTypes: attrTypes}
@ -1295,8 +1295,8 @@ func (v StorageClassesValue) ToObjectValue(ctx context.Context) (basetypes.Objec
attributeTypes := map[string]attr.Type{
"class": basetypes.StringType{},
"max_io_per_sec": basetypes.Int64Type{},
"max_through_in_mb": basetypes.Int64Type{},
"max_io_per_sec": basetypes.Int32Type{},
"max_through_in_mb": basetypes.Int32Type{},
}
if v.IsNull() {
@ -1359,8 +1359,8 @@ func (v StorageClassesValue) Type(ctx context.Context) attr.Type {
func (v StorageClassesValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
return map[string]attr.Type{
"class": basetypes.StringType{},
"max_io_per_sec": basetypes.Int64Type{},
"max_through_in_mb": basetypes.Int64Type{},
"max_io_per_sec": basetypes.Int32Type{},
"max_through_in_mb": basetypes.Int32Type{},
}
}
@ -1399,12 +1399,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
return nil, diags
}
pageVal, ok := pageAttribute.(basetypes.Int64Value)
pageVal, ok := pageAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute))
fmt.Sprintf(`page expected to be basetypes.Int32Value, was: %T`, pageAttribute))
}
sizeAttribute, ok := attributes["size"]
@ -1417,12 +1417,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
return nil, diags
}
sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
sizeVal, ok := sizeAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
fmt.Sprintf(`size expected to be basetypes.Int32Value, was: %T`, sizeAttribute))
}
sortAttribute, ok := attributes["sort"]
@ -1453,12 +1453,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
return nil, diags
}
totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value)
totalPagesVal, ok := totalPagesAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute))
fmt.Sprintf(`total_pages expected to be basetypes.Int32Value, was: %T`, totalPagesAttribute))
}
totalRowsAttribute, ok := attributes["total_rows"]
@ -1471,12 +1471,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
return nil, diags
}
totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value)
totalRowsVal, ok := totalRowsAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute))
fmt.Sprintf(`total_rows expected to be basetypes.Int32Value, was: %T`, totalRowsAttribute))
}
if diags.HasError() {
@ -1566,12 +1566,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
return NewPaginationValueUnknown(), diags
}
pageVal, ok := pageAttribute.(basetypes.Int64Value)
pageVal, ok := pageAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute))
fmt.Sprintf(`page expected to be basetypes.Int32Value, was: %T`, pageAttribute))
}
sizeAttribute, ok := attributes["size"]
@ -1584,12 +1584,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
return NewPaginationValueUnknown(), diags
}
sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
sizeVal, ok := sizeAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
fmt.Sprintf(`size expected to be basetypes.Int32Value, was: %T`, sizeAttribute))
}
sortAttribute, ok := attributes["sort"]
@ -1620,12 +1620,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
return NewPaginationValueUnknown(), diags
}
totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value)
totalPagesVal, ok := totalPagesAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute))
fmt.Sprintf(`total_pages expected to be basetypes.Int32Value, was: %T`, totalPagesAttribute))
}
totalRowsAttribute, ok := attributes["total_rows"]
@ -1638,12 +1638,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
return NewPaginationValueUnknown(), diags
}
totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value)
totalRowsVal, ok := totalRowsAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute))
fmt.Sprintf(`total_rows expected to be basetypes.Int32Value, was: %T`, totalRowsAttribute))
}
if diags.HasError() {
@ -1728,11 +1728,11 @@ func (t PaginationType) ValueType(ctx context.Context) attr.Value {
var _ basetypes.ObjectValuable = PaginationValue{}
type PaginationValue struct {
Page basetypes.Int64Value `tfsdk:"page"`
Size basetypes.Int64Value `tfsdk:"size"`
Page basetypes.Int32Value `tfsdk:"page"`
Size basetypes.Int32Value `tfsdk:"size"`
Sort basetypes.StringValue `tfsdk:"sort"`
TotalPages basetypes.Int64Value `tfsdk:"total_pages"`
TotalRows basetypes.Int64Value `tfsdk:"total_rows"`
TotalPages basetypes.Int32Value `tfsdk:"total_pages"`
TotalRows basetypes.Int32Value `tfsdk:"total_rows"`
state attr.ValueState
}
@ -1742,11 +1742,11 @@ func (v PaginationValue) ToTerraformValue(ctx context.Context) (tftypes.Value, e
var val tftypes.Value
var err error
attrTypes["page"] = basetypes.Int64Type{}.TerraformType(ctx)
attrTypes["size"] = basetypes.Int64Type{}.TerraformType(ctx)
attrTypes["page"] = basetypes.Int32Type{}.TerraformType(ctx)
attrTypes["size"] = basetypes.Int32Type{}.TerraformType(ctx)
attrTypes["sort"] = basetypes.StringType{}.TerraformType(ctx)
attrTypes["total_pages"] = basetypes.Int64Type{}.TerraformType(ctx)
attrTypes["total_rows"] = basetypes.Int64Type{}.TerraformType(ctx)
attrTypes["total_pages"] = basetypes.Int32Type{}.TerraformType(ctx)
attrTypes["total_rows"] = basetypes.Int32Type{}.TerraformType(ctx)
objectType := tftypes.Object{AttributeTypes: attrTypes}
@ -1824,11 +1824,11 @@ func (v PaginationValue) ToObjectValue(ctx context.Context) (basetypes.ObjectVal
var diags diag.Diagnostics
attributeTypes := map[string]attr.Type{
"page": basetypes.Int64Type{},
"size": basetypes.Int64Type{},
"page": basetypes.Int32Type{},
"size": basetypes.Int32Type{},
"sort": basetypes.StringType{},
"total_pages": basetypes.Int64Type{},
"total_rows": basetypes.Int64Type{},
"total_pages": basetypes.Int32Type{},
"total_rows": basetypes.Int32Type{},
}
if v.IsNull() {
@ -1900,10 +1900,10 @@ func (v PaginationValue) Type(ctx context.Context) attr.Type {
func (v PaginationValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
return map[string]attr.Type{
"page": basetypes.Int64Type{},
"size": basetypes.Int64Type{},
"page": basetypes.Int32Type{},
"size": basetypes.Int32Type{},
"sort": basetypes.StringType{},
"total_pages": basetypes.Int64Type{},
"total_rows": basetypes.Int64Type{},
"total_pages": basetypes.Int32Type{},
"total_rows": basetypes.Int32Type{},
}
}

View file

@ -4,21 +4,21 @@ import (
"context"
"fmt"
postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
"github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
)
type flavorsClientReader interface {
GetFlavorsRequest(
ctx context.Context,
projectId, region string,
) postgresflex.ApiGetFlavorsRequestRequest
) v3alpha1api.ApiGetFlavorsRequestRequest
}
func getAllFlavors(ctx context.Context, client flavorsClientReader, projectId, region string) (
[]postgresflex.ListFlavors,
[]v3alpha1api.ListFlavors,
error,
) {
getAllFilter := func(_ postgresflex.ListFlavors) bool { return true }
getAllFilter := func(_ v3alpha1api.ListFlavors) bool { return true }
flavorList, err := getFlavorsByFilter(ctx, client, projectId, region, getAllFilter)
if err != nil {
return nil, err
@ -32,29 +32,29 @@ func getFlavorsByFilter(
ctx context.Context,
client flavorsClientReader,
projectId, region string,
filter func(db postgresflex.ListFlavors) bool,
) ([]postgresflex.ListFlavors, error) {
filter func(db v3alpha1api.ListFlavors) bool,
) ([]v3alpha1api.ListFlavors, error) {
if projectId == "" || region == "" {
return nil, fmt.Errorf("listing postgresflex flavors: projectId and region are required")
}
const pageSize = 25
var result = make([]postgresflex.ListFlavors, 0)
var result = make([]v3alpha1api.ListFlavors, 0)
for page := int32(1); ; page++ {
res, err := client.GetFlavorsRequest(ctx, projectId, region).
Page(page).Size(pageSize).Sort(postgresflex.FLAVORSORT_ID_ASC).Execute()
Page(page).Size(pageSize).Sort(v3alpha1api.FLAVORSORT_ID_ASC).Execute()
if err != nil {
return nil, fmt.Errorf("requesting flavors list (page %d): %w", page, err)
}
// If the API returns no flavors, we have reached the end of the list.
if res.Flavors == nil || len(*res.Flavors) == 0 {
if len(res.Flavors) == 0 {
break
}
for _, flavor := range *res.Flavors {
for _, flavor := range res.Flavors {
if filter(flavor) {
result = append(result, flavor)
}

View file

@ -4,9 +4,7 @@ import (
"context"
"testing"
"github.com/stackitcloud/stackit-sdk-go/core/utils"
postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
postgresflex "github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
)
type mockRequest struct {
@ -30,25 +28,25 @@ func (m *mockFlavorsClient) GetFlavorsRequest(_ context.Context, _, _ string) po
return m.executeRequest()
}
var mockResp = func(page int64) (*postgresflex.GetFlavorsResponse, error) {
var mockResp = func(page int32) (*postgresflex.GetFlavorsResponse, error) {
if page == 1 {
return &postgresflex.GetFlavorsResponse{
Flavors: &[]postgresflex.ListFlavors{
{Id: utils.Ptr("flavor-1"), Description: utils.Ptr("first")},
{Id: utils.Ptr("flavor-2"), Description: utils.Ptr("second")},
Flavors: []postgresflex.ListFlavors{
{Id: "flavor-1", Description: "first"},
{Id: "flavor-2", Description: "second"},
},
}, nil
}
if page == 2 {
return &postgresflex.GetFlavorsResponse{
Flavors: &[]postgresflex.ListFlavors{
{Id: utils.Ptr("flavor-3"), Description: utils.Ptr("three")},
Flavors: []postgresflex.ListFlavors{
{Id: "flavor-3", Description: "three"},
},
}, nil
}
return &postgresflex.GetFlavorsResponse{
Flavors: &[]postgresflex.ListFlavors{},
Flavors: []postgresflex.ListFlavors{},
}, nil
}
@ -72,7 +70,7 @@ func TestGetFlavorsByFilter(t *testing.T) {
{
description: "Success - Filter flavors by description",
projectId: "pid", region: "reg",
filter: func(f postgresflex.ListFlavors) bool { return *f.Description == "first" },
filter: func(f postgresflex.ListFlavors) bool { return f.Description == "first" },
wantCount: 1,
wantErr: false,
},
@ -86,10 +84,10 @@ func TestGetFlavorsByFilter(t *testing.T) {
for _, tt := range tests {
t.Run(
tt.description, func(t *testing.T) {
var currentPage int64
var currentPage int32
client := &mockFlavorsClient{
executeRequest: func() postgresflex.ApiGetFlavorsRequestRequest {
return &mockRequest{
return mockRequest{
executeFunc: func() (*postgresflex.GetFlavorsResponse, error) {
currentPage++
return mockResp(currentPage)
@ -113,10 +111,10 @@ func TestGetFlavorsByFilter(t *testing.T) {
}
func TestGetAllFlavors(t *testing.T) {
var currentPage int64
var currentPage int32
client := &mockFlavorsClient{
executeRequest: func() postgresflex.ApiGetFlavorsRequestRequest {
return &mockRequest{
return mockRequest{
executeFunc: func() (*postgresflex.GetFlavorsResponse, error) {
currentPage++
return mockResp(currentPage)

View file

@ -5,8 +5,8 @@ import (
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-log/tflog"
"github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
postgresflexalphaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/flavors/datasources_gen"
@ -26,7 +26,7 @@ func NewFlavorsDataSource() datasource.DataSource {
type dataSourceModel = postgresflexalphaGen.FlavorsModel
type flavorsDataSource struct {
client *postgresflexalpha.APIClient
client *v3alpha1api.APIClient
providerData core.ProviderData
}

View file

@ -23,7 +23,7 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
"flavors": schema.ListNestedAttribute{
NestedObject: schema.NestedAttributeObject{
Attributes: map[string]schema.Attribute{
"cpu": schema.Int64Attribute{
"cpu": schema.Int32Attribute{
Computed: true,
Description: "The cpu count of the instance.",
MarkdownDescription: "The cpu count of the instance.",
@ -38,17 +38,17 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
Description: "The id of the instance flavor.",
MarkdownDescription: "The id of the instance flavor.",
},
"max_gb": schema.Int64Attribute{
"max_gb": schema.Int32Attribute{
Computed: true,
Description: "maximum storage which can be ordered for the flavor in Gigabyte.",
MarkdownDescription: "maximum storage which can be ordered for the flavor in Gigabyte.",
},
"memory": schema.Int64Attribute{
"memory": schema.Int32Attribute{
Computed: true,
Description: "The memory of the instance in Gibibyte.",
MarkdownDescription: "The memory of the instance in Gibibyte.",
},
"min_gb": schema.Int64Attribute{
"min_gb": schema.Int32Attribute{
Computed: true,
Description: "minimum storage which is required to order in Gigabyte.",
MarkdownDescription: "minimum storage which is required to order in Gigabyte.",
@ -64,10 +64,10 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
"class": schema.StringAttribute{
Computed: true,
},
"max_io_per_sec": schema.Int64Attribute{
"max_io_per_sec": schema.Int32Attribute{
Computed: true,
},
"max_through_in_mb": schema.Int64Attribute{
"max_through_in_mb": schema.Int32Attribute{
Computed: true,
},
},
@ -92,7 +92,7 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
Description: "List of flavors available for the project.",
MarkdownDescription: "List of flavors available for the project.",
},
"page": schema.Int64Attribute{
"page": schema.Int32Attribute{
Optional: true,
Computed: true,
Description: "Number of the page of items list to be returned.",
@ -100,19 +100,19 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
},
"pagination": schema.SingleNestedAttribute{
Attributes: map[string]schema.Attribute{
"page": schema.Int64Attribute{
"page": schema.Int32Attribute{
Computed: true,
},
"size": schema.Int64Attribute{
"size": schema.Int32Attribute{
Computed: true,
},
"sort": schema.StringAttribute{
Computed: true,
},
"total_pages": schema.Int64Attribute{
"total_pages": schema.Int32Attribute{
Computed: true,
},
"total_rows": schema.Int64Attribute{
"total_rows": schema.Int32Attribute{
Computed: true,
},
},
@ -138,7 +138,7 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
),
},
},
"size": schema.Int64Attribute{
"size": schema.Int32Attribute{
Optional: true,
Computed: true,
Description: "Number of items to be returned on each page.",
@ -176,11 +176,11 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
type FlavorsModel struct {
Flavors types.List `tfsdk:"flavors"`
Page types.Int64 `tfsdk:"page"`
Page types.Int32 `tfsdk:"page"`
Pagination PaginationValue `tfsdk:"pagination"`
ProjectId types.String `tfsdk:"project_id"`
Region types.String `tfsdk:"region"`
Size types.Int64 `tfsdk:"size"`
Size types.Int32 `tfsdk:"size"`
Sort types.String `tfsdk:"sort"`
}
@ -219,12 +219,12 @@ func (t FlavorsType) ValueFromObject(ctx context.Context, in basetypes.ObjectVal
return nil, diags
}
cpuVal, ok := cpuAttribute.(basetypes.Int64Value)
cpuVal, ok := cpuAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`cpu expected to be basetypes.Int64Value, was: %T`, cpuAttribute))
fmt.Sprintf(`cpu expected to be basetypes.Int32Value, was: %T`, cpuAttribute))
}
descriptionAttribute, ok := attributes["description"]
@ -273,12 +273,12 @@ func (t FlavorsType) ValueFromObject(ctx context.Context, in basetypes.ObjectVal
return nil, diags
}
maxGbVal, ok := maxGbAttribute.(basetypes.Int64Value)
maxGbVal, ok := maxGbAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`max_gb expected to be basetypes.Int64Value, was: %T`, maxGbAttribute))
fmt.Sprintf(`max_gb expected to be basetypes.Int32Value, was: %T`, maxGbAttribute))
}
memoryAttribute, ok := attributes["memory"]
@ -291,12 +291,12 @@ func (t FlavorsType) ValueFromObject(ctx context.Context, in basetypes.ObjectVal
return nil, diags
}
memoryVal, ok := memoryAttribute.(basetypes.Int64Value)
memoryVal, ok := memoryAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`memory expected to be basetypes.Int64Value, was: %T`, memoryAttribute))
fmt.Sprintf(`memory expected to be basetypes.Int32Value, was: %T`, memoryAttribute))
}
minGbAttribute, ok := attributes["min_gb"]
@ -309,12 +309,12 @@ func (t FlavorsType) ValueFromObject(ctx context.Context, in basetypes.ObjectVal
return nil, diags
}
minGbVal, ok := minGbAttribute.(basetypes.Int64Value)
minGbVal, ok := minGbAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`min_gb expected to be basetypes.Int64Value, was: %T`, minGbAttribute))
fmt.Sprintf(`min_gb expected to be basetypes.Int32Value, was: %T`, minGbAttribute))
}
nodeTypeAttribute, ok := attributes["node_type"]
@ -443,12 +443,12 @@ func NewFlavorsValue(attributeTypes map[string]attr.Type, attributes map[string]
return NewFlavorsValueUnknown(), diags
}
cpuVal, ok := cpuAttribute.(basetypes.Int64Value)
cpuVal, ok := cpuAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`cpu expected to be basetypes.Int64Value, was: %T`, cpuAttribute))
fmt.Sprintf(`cpu expected to be basetypes.Int32Value, was: %T`, cpuAttribute))
}
descriptionAttribute, ok := attributes["description"]
@ -497,12 +497,12 @@ func NewFlavorsValue(attributeTypes map[string]attr.Type, attributes map[string]
return NewFlavorsValueUnknown(), diags
}
maxGbVal, ok := maxGbAttribute.(basetypes.Int64Value)
maxGbVal, ok := maxGbAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`max_gb expected to be basetypes.Int64Value, was: %T`, maxGbAttribute))
fmt.Sprintf(`max_gb expected to be basetypes.Int32Value, was: %T`, maxGbAttribute))
}
memoryAttribute, ok := attributes["memory"]
@ -515,12 +515,12 @@ func NewFlavorsValue(attributeTypes map[string]attr.Type, attributes map[string]
return NewFlavorsValueUnknown(), diags
}
memoryVal, ok := memoryAttribute.(basetypes.Int64Value)
memoryVal, ok := memoryAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`memory expected to be basetypes.Int64Value, was: %T`, memoryAttribute))
fmt.Sprintf(`memory expected to be basetypes.Int32Value, was: %T`, memoryAttribute))
}
minGbAttribute, ok := attributes["min_gb"]
@ -533,12 +533,12 @@ func NewFlavorsValue(attributeTypes map[string]attr.Type, attributes map[string]
return NewFlavorsValueUnknown(), diags
}
minGbVal, ok := minGbAttribute.(basetypes.Int64Value)
minGbVal, ok := minGbAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`min_gb expected to be basetypes.Int64Value, was: %T`, minGbAttribute))
fmt.Sprintf(`min_gb expected to be basetypes.Int32Value, was: %T`, minGbAttribute))
}
nodeTypeAttribute, ok := attributes["node_type"]
@ -662,12 +662,12 @@ func (t FlavorsType) ValueType(ctx context.Context) attr.Value {
var _ basetypes.ObjectValuable = FlavorsValue{}
type FlavorsValue struct {
Cpu basetypes.Int64Value `tfsdk:"cpu"`
Cpu basetypes.Int32Value `tfsdk:"cpu"`
Description basetypes.StringValue `tfsdk:"description"`
Id basetypes.StringValue `tfsdk:"id"`
MaxGb basetypes.Int64Value `tfsdk:"max_gb"`
Memory basetypes.Int64Value `tfsdk:"memory"`
MinGb basetypes.Int64Value `tfsdk:"min_gb"`
MaxGb basetypes.Int32Value `tfsdk:"max_gb"`
Memory basetypes.Int32Value `tfsdk:"memory"`
MinGb basetypes.Int32Value `tfsdk:"min_gb"`
NodeType basetypes.StringValue `tfsdk:"node_type"`
StorageClasses basetypes.ListValue `tfsdk:"storage_classes"`
state attr.ValueState
@ -679,12 +679,12 @@ func (v FlavorsValue) ToTerraformValue(ctx context.Context) (tftypes.Value, erro
var val tftypes.Value
var err error
attrTypes["cpu"] = basetypes.Int64Type{}.TerraformType(ctx)
attrTypes["cpu"] = basetypes.Int32Type{}.TerraformType(ctx)
attrTypes["description"] = basetypes.StringType{}.TerraformType(ctx)
attrTypes["id"] = basetypes.StringType{}.TerraformType(ctx)
attrTypes["max_gb"] = basetypes.Int64Type{}.TerraformType(ctx)
attrTypes["memory"] = basetypes.Int64Type{}.TerraformType(ctx)
attrTypes["min_gb"] = basetypes.Int64Type{}.TerraformType(ctx)
attrTypes["max_gb"] = basetypes.Int32Type{}.TerraformType(ctx)
attrTypes["memory"] = basetypes.Int32Type{}.TerraformType(ctx)
attrTypes["min_gb"] = basetypes.Int32Type{}.TerraformType(ctx)
attrTypes["node_type"] = basetypes.StringType{}.TerraformType(ctx)
attrTypes["storage_classes"] = basetypes.ListType{
ElemType: StorageClassesValue{}.Type(ctx),
@ -819,12 +819,12 @@ func (v FlavorsValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue,
}
attributeTypes := map[string]attr.Type{
"cpu": basetypes.Int64Type{},
"cpu": basetypes.Int32Type{},
"description": basetypes.StringType{},
"id": basetypes.StringType{},
"max_gb": basetypes.Int64Type{},
"memory": basetypes.Int64Type{},
"min_gb": basetypes.Int64Type{},
"max_gb": basetypes.Int32Type{},
"memory": basetypes.Int32Type{},
"min_gb": basetypes.Int32Type{},
"node_type": basetypes.StringType{},
"storage_classes": basetypes.ListType{
ElemType: StorageClassesValue{}.Type(ctx),
@ -915,12 +915,12 @@ func (v FlavorsValue) Type(ctx context.Context) attr.Type {
func (v FlavorsValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
return map[string]attr.Type{
"cpu": basetypes.Int64Type{},
"cpu": basetypes.Int32Type{},
"description": basetypes.StringType{},
"id": basetypes.StringType{},
"max_gb": basetypes.Int64Type{},
"memory": basetypes.Int64Type{},
"min_gb": basetypes.Int64Type{},
"max_gb": basetypes.Int32Type{},
"memory": basetypes.Int32Type{},
"min_gb": basetypes.Int32Type{},
"node_type": basetypes.StringType{},
"storage_classes": basetypes.ListType{
ElemType: StorageClassesValue{}.Type(ctx),
@ -981,12 +981,12 @@ func (t StorageClassesType) ValueFromObject(ctx context.Context, in basetypes.Ob
return nil, diags
}
maxIoPerSecVal, ok := maxIoPerSecAttribute.(basetypes.Int64Value)
maxIoPerSecVal, ok := maxIoPerSecAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`max_io_per_sec expected to be basetypes.Int64Value, was: %T`, maxIoPerSecAttribute))
fmt.Sprintf(`max_io_per_sec expected to be basetypes.Int32Value, was: %T`, maxIoPerSecAttribute))
}
maxThroughInMbAttribute, ok := attributes["max_through_in_mb"]
@ -999,12 +999,12 @@ func (t StorageClassesType) ValueFromObject(ctx context.Context, in basetypes.Ob
return nil, diags
}
maxThroughInMbVal, ok := maxThroughInMbAttribute.(basetypes.Int64Value)
maxThroughInMbVal, ok := maxThroughInMbAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`max_through_in_mb expected to be basetypes.Int64Value, was: %T`, maxThroughInMbAttribute))
fmt.Sprintf(`max_through_in_mb expected to be basetypes.Int32Value, was: %T`, maxThroughInMbAttribute))
}
if diags.HasError() {
@ -1110,12 +1110,12 @@ func NewStorageClassesValue(attributeTypes map[string]attr.Type, attributes map[
return NewStorageClassesValueUnknown(), diags
}
maxIoPerSecVal, ok := maxIoPerSecAttribute.(basetypes.Int64Value)
maxIoPerSecVal, ok := maxIoPerSecAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`max_io_per_sec expected to be basetypes.Int64Value, was: %T`, maxIoPerSecAttribute))
fmt.Sprintf(`max_io_per_sec expected to be basetypes.Int32Value, was: %T`, maxIoPerSecAttribute))
}
maxThroughInMbAttribute, ok := attributes["max_through_in_mb"]
@ -1128,12 +1128,12 @@ func NewStorageClassesValue(attributeTypes map[string]attr.Type, attributes map[
return NewStorageClassesValueUnknown(), diags
}
maxThroughInMbVal, ok := maxThroughInMbAttribute.(basetypes.Int64Value)
maxThroughInMbVal, ok := maxThroughInMbAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`max_through_in_mb expected to be basetypes.Int64Value, was: %T`, maxThroughInMbAttribute))
fmt.Sprintf(`max_through_in_mb expected to be basetypes.Int32Value, was: %T`, maxThroughInMbAttribute))
}
if diags.HasError() {
@ -1217,8 +1217,8 @@ var _ basetypes.ObjectValuable = StorageClassesValue{}
type StorageClassesValue struct {
Class basetypes.StringValue `tfsdk:"class"`
MaxIoPerSec basetypes.Int64Value `tfsdk:"max_io_per_sec"`
MaxThroughInMb basetypes.Int64Value `tfsdk:"max_through_in_mb"`
MaxIoPerSec basetypes.Int32Value `tfsdk:"max_io_per_sec"`
MaxThroughInMb basetypes.Int32Value `tfsdk:"max_through_in_mb"`
state attr.ValueState
}
@ -1229,8 +1229,8 @@ func (v StorageClassesValue) ToTerraformValue(ctx context.Context) (tftypes.Valu
var err error
attrTypes["class"] = basetypes.StringType{}.TerraformType(ctx)
attrTypes["max_io_per_sec"] = basetypes.Int64Type{}.TerraformType(ctx)
attrTypes["max_through_in_mb"] = basetypes.Int64Type{}.TerraformType(ctx)
attrTypes["max_io_per_sec"] = basetypes.Int32Type{}.TerraformType(ctx)
attrTypes["max_through_in_mb"] = basetypes.Int32Type{}.TerraformType(ctx)
objectType := tftypes.Object{AttributeTypes: attrTypes}
@ -1293,8 +1293,8 @@ func (v StorageClassesValue) ToObjectValue(ctx context.Context) (basetypes.Objec
attributeTypes := map[string]attr.Type{
"class": basetypes.StringType{},
"max_io_per_sec": basetypes.Int64Type{},
"max_through_in_mb": basetypes.Int64Type{},
"max_io_per_sec": basetypes.Int32Type{},
"max_through_in_mb": basetypes.Int32Type{},
}
if v.IsNull() {
@ -1357,8 +1357,8 @@ func (v StorageClassesValue) Type(ctx context.Context) attr.Type {
func (v StorageClassesValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
return map[string]attr.Type{
"class": basetypes.StringType{},
"max_io_per_sec": basetypes.Int64Type{},
"max_through_in_mb": basetypes.Int64Type{},
"max_io_per_sec": basetypes.Int32Type{},
"max_through_in_mb": basetypes.Int32Type{},
}
}
@ -1397,12 +1397,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
return nil, diags
}
pageVal, ok := pageAttribute.(basetypes.Int64Value)
pageVal, ok := pageAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute))
fmt.Sprintf(`page expected to be basetypes.Int32Value, was: %T`, pageAttribute))
}
sizeAttribute, ok := attributes["size"]
@ -1415,12 +1415,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
return nil, diags
}
sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
sizeVal, ok := sizeAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
fmt.Sprintf(`size expected to be basetypes.Int32Value, was: %T`, sizeAttribute))
}
sortAttribute, ok := attributes["sort"]
@ -1451,12 +1451,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
return nil, diags
}
totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value)
totalPagesVal, ok := totalPagesAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute))
fmt.Sprintf(`total_pages expected to be basetypes.Int32Value, was: %T`, totalPagesAttribute))
}
totalRowsAttribute, ok := attributes["total_rows"]
@ -1469,12 +1469,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
return nil, diags
}
totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value)
totalRowsVal, ok := totalRowsAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute))
fmt.Sprintf(`total_rows expected to be basetypes.Int32Value, was: %T`, totalRowsAttribute))
}
if diags.HasError() {
@ -1564,12 +1564,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
return NewPaginationValueUnknown(), diags
}
pageVal, ok := pageAttribute.(basetypes.Int64Value)
pageVal, ok := pageAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute))
fmt.Sprintf(`page expected to be basetypes.Int32Value, was: %T`, pageAttribute))
}
sizeAttribute, ok := attributes["size"]
@ -1582,12 +1582,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
return NewPaginationValueUnknown(), diags
}
sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
sizeVal, ok := sizeAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
fmt.Sprintf(`size expected to be basetypes.Int32Value, was: %T`, sizeAttribute))
}
sortAttribute, ok := attributes["sort"]
@ -1618,12 +1618,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
return NewPaginationValueUnknown(), diags
}
totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value)
totalPagesVal, ok := totalPagesAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute))
fmt.Sprintf(`total_pages expected to be basetypes.Int32Value, was: %T`, totalPagesAttribute))
}
totalRowsAttribute, ok := attributes["total_rows"]
@ -1636,12 +1636,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
return NewPaginationValueUnknown(), diags
}
totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value)
totalRowsVal, ok := totalRowsAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute))
fmt.Sprintf(`total_rows expected to be basetypes.Int32Value, was: %T`, totalRowsAttribute))
}
if diags.HasError() {
@ -1726,11 +1726,11 @@ func (t PaginationType) ValueType(ctx context.Context) attr.Value {
var _ basetypes.ObjectValuable = PaginationValue{}
type PaginationValue struct {
Page basetypes.Int64Value `tfsdk:"page"`
Size basetypes.Int64Value `tfsdk:"size"`
Page basetypes.Int32Value `tfsdk:"page"`
Size basetypes.Int32Value `tfsdk:"size"`
Sort basetypes.StringValue `tfsdk:"sort"`
TotalPages basetypes.Int64Value `tfsdk:"total_pages"`
TotalRows basetypes.Int64Value `tfsdk:"total_rows"`
TotalPages basetypes.Int32Value `tfsdk:"total_pages"`
TotalRows basetypes.Int32Value `tfsdk:"total_rows"`
state attr.ValueState
}
@ -1740,11 +1740,11 @@ func (v PaginationValue) ToTerraformValue(ctx context.Context) (tftypes.Value, e
var val tftypes.Value
var err error
attrTypes["page"] = basetypes.Int64Type{}.TerraformType(ctx)
attrTypes["size"] = basetypes.Int64Type{}.TerraformType(ctx)
attrTypes["page"] = basetypes.Int32Type{}.TerraformType(ctx)
attrTypes["size"] = basetypes.Int32Type{}.TerraformType(ctx)
attrTypes["sort"] = basetypes.StringType{}.TerraformType(ctx)
attrTypes["total_pages"] = basetypes.Int64Type{}.TerraformType(ctx)
attrTypes["total_rows"] = basetypes.Int64Type{}.TerraformType(ctx)
attrTypes["total_pages"] = basetypes.Int32Type{}.TerraformType(ctx)
attrTypes["total_rows"] = basetypes.Int32Type{}.TerraformType(ctx)
objectType := tftypes.Object{AttributeTypes: attrTypes}
@ -1822,11 +1822,11 @@ func (v PaginationValue) ToObjectValue(ctx context.Context) (basetypes.ObjectVal
var diags diag.Diagnostics
attributeTypes := map[string]attr.Type{
"page": basetypes.Int64Type{},
"size": basetypes.Int64Type{},
"page": basetypes.Int32Type{},
"size": basetypes.Int32Type{},
"sort": basetypes.StringType{},
"total_pages": basetypes.Int64Type{},
"total_rows": basetypes.Int64Type{},
"total_pages": basetypes.Int32Type{},
"total_rows": basetypes.Int32Type{},
}
if v.IsNull() {
@ -1898,10 +1898,10 @@ func (v PaginationValue) Type(ctx context.Context) attr.Type {
func (v PaginationValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
return map[string]attr.Type{
"page": basetypes.Int64Type{},
"size": basetypes.Int64Type{},
"page": basetypes.Int32Type{},
"size": basetypes.Int32Type{},
"sort": basetypes.StringType{},
"total_pages": basetypes.Int64Type{},
"total_rows": basetypes.Int64Type{},
"total_pages": basetypes.Int32Type{},
"total_rows": basetypes.Int32Type{},
}
}

View file

@ -6,8 +6,8 @@ import (
"net/http"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
postgresflexalpha2 "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/instance/datasources_gen"
postgresflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/utils"
@ -37,7 +37,7 @@ type dataSourceModel struct {
// instanceDataSource is the data source implementation.
type instanceDataSource struct {
client *postgresflexalpha.APIClient
client *v3alpha1api.APIClient
providerData core.ProviderData
}
@ -96,7 +96,7 @@ func (r *instanceDataSource) Read(
ctx = tflog.SetField(ctx, "project_id", projectId)
ctx = tflog.SetField(ctx, "instance_id", instanceId)
ctx = tflog.SetField(ctx, "region", region)
instanceResp, err := r.client.GetInstanceRequest(ctx, projectId, region, instanceId).Execute()
instanceResp, err := r.client.DefaultAPI.GetInstanceRequest(ctx, projectId, region, instanceId).Execute()
if err != nil {
utils.LogError(
ctx,

View file

@ -40,7 +40,7 @@ func InstanceDataSourceSchema(ctx context.Context) schema.Schema {
Description: "The host of the instance.",
MarkdownDescription: "The host of the instance.",
},
"port": schema.Int64Attribute{
"port": schema.Int32Attribute{
Computed: true,
Description: "The port of the instance.",
MarkdownDescription: "The port of the instance.",
@ -164,12 +164,12 @@ func InstanceDataSourceSchema(ctx context.Context) schema.Schema {
),
},
},
"replicas": schema.Int64Attribute{
"replicas": schema.Int32Attribute{
Computed: true,
Description: "How many replicas the instance should have.",
MarkdownDescription: "How many replicas the instance should have.",
},
"retention_days": schema.Int64Attribute{
"retention_days": schema.Int32Attribute{
Computed: true,
Description: "How long backups are retained. The value can only be between 32 and 365 days.",
MarkdownDescription: "How long backups are retained. The value can only be between 32 and 365 days.",
@ -186,7 +186,7 @@ func InstanceDataSourceSchema(ctx context.Context) schema.Schema {
Description: "The storage class for the storage.",
MarkdownDescription: "The storage class for the storage.",
},
"size": schema.Int64Attribute{
"size": schema.Int32Attribute{
Computed: true,
Description: "The storage size in Gigabytes.",
MarkdownDescription: "The storage size in Gigabytes.",
@ -223,8 +223,8 @@ type InstanceModel struct {
Network NetworkValue `tfsdk:"network"`
ProjectId types.String `tfsdk:"project_id"`
Region types.String `tfsdk:"region"`
Replicas types.Int64 `tfsdk:"replicas"`
RetentionDays types.Int64 `tfsdk:"retention_days"`
Replicas types.Int32 `tfsdk:"replicas"`
RetentionDays types.Int32 `tfsdk:"retention_days"`
Status types.String `tfsdk:"status"`
Storage StorageValue `tfsdk:"storage"`
Version types.String `tfsdk:"version"`
@ -634,12 +634,12 @@ func (t WriteType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue
return nil, diags
}
portVal, ok := portAttribute.(basetypes.Int64Value)
portVal, ok := portAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`port expected to be basetypes.Int64Value, was: %T`, portAttribute))
fmt.Sprintf(`port expected to be basetypes.Int32Value, was: %T`, portAttribute))
}
if diags.HasError() {
@ -744,12 +744,12 @@ func NewWriteValue(attributeTypes map[string]attr.Type, attributes map[string]at
return NewWriteValueUnknown(), diags
}
portVal, ok := portAttribute.(basetypes.Int64Value)
portVal, ok := portAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`port expected to be basetypes.Int64Value, was: %T`, portAttribute))
fmt.Sprintf(`port expected to be basetypes.Int32Value, was: %T`, portAttribute))
}
if diags.HasError() {
@ -832,7 +832,7 @@ var _ basetypes.ObjectValuable = WriteValue{}
type WriteValue struct {
Host basetypes.StringValue `tfsdk:"host"`
Port basetypes.Int64Value `tfsdk:"port"`
Port basetypes.Int32Value `tfsdk:"port"`
state attr.ValueState
}
@ -843,7 +843,7 @@ func (v WriteValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error)
var err error
attrTypes["host"] = basetypes.StringType{}.TerraformType(ctx)
attrTypes["port"] = basetypes.Int64Type{}.TerraformType(ctx)
attrTypes["port"] = basetypes.Int32Type{}.TerraformType(ctx)
objectType := tftypes.Object{AttributeTypes: attrTypes}
@ -898,7 +898,7 @@ func (v WriteValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, d
attributeTypes := map[string]attr.Type{
"host": basetypes.StringType{},
"port": basetypes.Int64Type{},
"port": basetypes.Int32Type{},
}
if v.IsNull() {
@ -956,7 +956,7 @@ func (v WriteValue) Type(ctx context.Context) attr.Type {
func (v WriteValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
return map[string]attr.Type{
"host": basetypes.StringType{},
"port": basetypes.Int64Type{},
"port": basetypes.Int32Type{},
}
}
@ -2020,12 +2020,12 @@ func (t StorageType) ValueFromObject(ctx context.Context, in basetypes.ObjectVal
return nil, diags
}
sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
sizeVal, ok := sizeAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
fmt.Sprintf(`size expected to be basetypes.Int32Value, was: %T`, sizeAttribute))
}
if diags.HasError() {
@ -2130,12 +2130,12 @@ func NewStorageValue(attributeTypes map[string]attr.Type, attributes map[string]
return NewStorageValueUnknown(), diags
}
sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
sizeVal, ok := sizeAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
fmt.Sprintf(`size expected to be basetypes.Int32Value, was: %T`, sizeAttribute))
}
if diags.HasError() {
@ -2218,7 +2218,7 @@ var _ basetypes.ObjectValuable = StorageValue{}
type StorageValue struct {
PerformanceClass basetypes.StringValue `tfsdk:"performance_class"`
Size basetypes.Int64Value `tfsdk:"size"`
Size basetypes.Int32Value `tfsdk:"size"`
state attr.ValueState
}
@ -2229,7 +2229,7 @@ func (v StorageValue) ToTerraformValue(ctx context.Context) (tftypes.Value, erro
var err error
attrTypes["performance_class"] = basetypes.StringType{}.TerraformType(ctx)
attrTypes["size"] = basetypes.Int64Type{}.TerraformType(ctx)
attrTypes["size"] = basetypes.Int32Type{}.TerraformType(ctx)
objectType := tftypes.Object{AttributeTypes: attrTypes}
@ -2284,7 +2284,7 @@ func (v StorageValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue,
attributeTypes := map[string]attr.Type{
"performance_class": basetypes.StringType{},
"size": basetypes.Int64Type{},
"size": basetypes.Int32Type{},
}
if v.IsNull() {
@ -2342,6 +2342,6 @@ func (v StorageValue) Type(ctx context.Context) attr.Type {
func (v StorageValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
return map[string]attr.Type{
"performance_class": basetypes.StringType{},
"size": basetypes.Int64Type{},
"size": basetypes.Int32Type{},
}
}

View file

@ -54,7 +54,7 @@ func InstancesDataSourceSchema(ctx context.Context) schema.Schema {
Description: "List of owned instances and their current status.",
MarkdownDescription: "List of owned instances and their current status.",
},
"page": schema.Int64Attribute{
"page": schema.Int32Attribute{
Optional: true,
Computed: true,
Description: "Number of the page of items list to be returned.",
@ -62,19 +62,19 @@ func InstancesDataSourceSchema(ctx context.Context) schema.Schema {
},
"pagination": schema.SingleNestedAttribute{
Attributes: map[string]schema.Attribute{
"page": schema.Int64Attribute{
"page": schema.Int32Attribute{
Computed: true,
},
"size": schema.Int64Attribute{
"size": schema.Int32Attribute{
Computed: true,
},
"sort": schema.StringAttribute{
Computed: true,
},
"total_pages": schema.Int64Attribute{
"total_pages": schema.Int32Attribute{
Computed: true,
},
"total_rows": schema.Int64Attribute{
"total_rows": schema.Int32Attribute{
Computed: true,
},
},
@ -100,7 +100,7 @@ func InstancesDataSourceSchema(ctx context.Context) schema.Schema {
),
},
},
"size": schema.Int64Attribute{
"size": schema.Int32Attribute{
Optional: true,
Computed: true,
Description: "Number of items to be returned on each page.",
@ -130,11 +130,11 @@ func InstancesDataSourceSchema(ctx context.Context) schema.Schema {
type InstancesModel struct {
Instances types.List `tfsdk:"instances"`
Page types.Int64 `tfsdk:"page"`
Page types.Int32 `tfsdk:"page"`
Pagination PaginationValue `tfsdk:"pagination"`
ProjectId types.String `tfsdk:"project_id"`
Region types.String `tfsdk:"region"`
Size types.Int64 `tfsdk:"size"`
Size types.Int32 `tfsdk:"size"`
Sort types.String `tfsdk:"sort"`
}
@ -662,12 +662,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
return nil, diags
}
pageVal, ok := pageAttribute.(basetypes.Int64Value)
pageVal, ok := pageAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute))
fmt.Sprintf(`page expected to be basetypes.Int32Value, was: %T`, pageAttribute))
}
sizeAttribute, ok := attributes["size"]
@ -680,12 +680,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
return nil, diags
}
sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
sizeVal, ok := sizeAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
fmt.Sprintf(`size expected to be basetypes.Int32Value, was: %T`, sizeAttribute))
}
sortAttribute, ok := attributes["sort"]
@ -716,12 +716,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
return nil, diags
}
totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value)
totalPagesVal, ok := totalPagesAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute))
fmt.Sprintf(`total_pages expected to be basetypes.Int32Value, was: %T`, totalPagesAttribute))
}
totalRowsAttribute, ok := attributes["total_rows"]
@ -734,12 +734,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
return nil, diags
}
totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value)
totalRowsVal, ok := totalRowsAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute))
fmt.Sprintf(`total_rows expected to be basetypes.Int32Value, was: %T`, totalRowsAttribute))
}
if diags.HasError() {
@ -829,12 +829,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
return NewPaginationValueUnknown(), diags
}
pageVal, ok := pageAttribute.(basetypes.Int64Value)
pageVal, ok := pageAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute))
fmt.Sprintf(`page expected to be basetypes.Int32Value, was: %T`, pageAttribute))
}
sizeAttribute, ok := attributes["size"]
@ -847,12 +847,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
return NewPaginationValueUnknown(), diags
}
sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
sizeVal, ok := sizeAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
fmt.Sprintf(`size expected to be basetypes.Int32Value, was: %T`, sizeAttribute))
}
sortAttribute, ok := attributes["sort"]
@ -883,12 +883,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
return NewPaginationValueUnknown(), diags
}
totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value)
totalPagesVal, ok := totalPagesAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute))
fmt.Sprintf(`total_pages expected to be basetypes.Int32Value, was: %T`, totalPagesAttribute))
}
totalRowsAttribute, ok := attributes["total_rows"]
@ -901,12 +901,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
return NewPaginationValueUnknown(), diags
}
totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value)
totalRowsVal, ok := totalRowsAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute))
fmt.Sprintf(`total_rows expected to be basetypes.Int32Value, was: %T`, totalRowsAttribute))
}
if diags.HasError() {
@ -991,11 +991,11 @@ func (t PaginationType) ValueType(ctx context.Context) attr.Value {
var _ basetypes.ObjectValuable = PaginationValue{}
type PaginationValue struct {
Page basetypes.Int64Value `tfsdk:"page"`
Size basetypes.Int64Value `tfsdk:"size"`
Page basetypes.Int32Value `tfsdk:"page"`
Size basetypes.Int32Value `tfsdk:"size"`
Sort basetypes.StringValue `tfsdk:"sort"`
TotalPages basetypes.Int64Value `tfsdk:"total_pages"`
TotalRows basetypes.Int64Value `tfsdk:"total_rows"`
TotalPages basetypes.Int32Value `tfsdk:"total_pages"`
TotalRows basetypes.Int32Value `tfsdk:"total_rows"`
state attr.ValueState
}
@ -1005,11 +1005,11 @@ func (v PaginationValue) ToTerraformValue(ctx context.Context) (tftypes.Value, e
var val tftypes.Value
var err error
attrTypes["page"] = basetypes.Int64Type{}.TerraformType(ctx)
attrTypes["size"] = basetypes.Int64Type{}.TerraformType(ctx)
attrTypes["page"] = basetypes.Int32Type{}.TerraformType(ctx)
attrTypes["size"] = basetypes.Int32Type{}.TerraformType(ctx)
attrTypes["sort"] = basetypes.StringType{}.TerraformType(ctx)
attrTypes["total_pages"] = basetypes.Int64Type{}.TerraformType(ctx)
attrTypes["total_rows"] = basetypes.Int64Type{}.TerraformType(ctx)
attrTypes["total_pages"] = basetypes.Int32Type{}.TerraformType(ctx)
attrTypes["total_rows"] = basetypes.Int32Type{}.TerraformType(ctx)
objectType := tftypes.Object{AttributeTypes: attrTypes}
@ -1087,11 +1087,11 @@ func (v PaginationValue) ToObjectValue(ctx context.Context) (basetypes.ObjectVal
var diags diag.Diagnostics
attributeTypes := map[string]attr.Type{
"page": basetypes.Int64Type{},
"size": basetypes.Int64Type{},
"page": basetypes.Int32Type{},
"size": basetypes.Int32Type{},
"sort": basetypes.StringType{},
"total_pages": basetypes.Int64Type{},
"total_rows": basetypes.Int64Type{},
"total_pages": basetypes.Int32Type{},
"total_rows": basetypes.Int32Type{},
}
if v.IsNull() {
@ -1163,10 +1163,10 @@ func (v PaginationValue) Type(ctx context.Context) attr.Type {
func (v PaginationValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
return map[string]attr.Type{
"page": basetypes.Int64Type{},
"size": basetypes.Int64Type{},
"page": basetypes.Int32Type{},
"size": basetypes.Int32Type{},
"sort": basetypes.StringType{},
"total_pages": basetypes.Int64Type{},
"total_rows": basetypes.Int64Type{},
"total_pages": basetypes.Int32Type{},
"total_rows": basetypes.Int32Type{},
}
}

View file

@ -7,8 +7,8 @@ import (
"github.com/hashicorp/terraform-plugin-framework/attr"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
postgresflex "github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
postgresflexalphadatasource "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/instance/datasources_gen"
postgresflexalpharesource "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/instance/resources_gen"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
@ -33,9 +33,7 @@ func mapGetInstanceResponseToModel(
)
}
isConnectionInfoIncomplete := resp.ConnectionInfo == nil || resp.ConnectionInfo.Write == nil ||
resp.ConnectionInfo.Write.Host == nil || *resp.ConnectionInfo.Write.Host == "" ||
resp.ConnectionInfo.Write.Port == nil || *resp.ConnectionInfo.Write.Port == 0
isConnectionInfoIncomplete := resp.ConnectionInfo.Write.Host == "" || resp.ConnectionInfo.Write.Port == 0
if isConnectionInfoIncomplete {
m.ConnectionInfo = postgresflexalpharesource.NewConnectionInfoValueNull()
@ -46,8 +44,8 @@ func mapGetInstanceResponseToModel(
"write": postgresflexalpharesource.NewWriteValueMust(
postgresflexalpharesource.WriteValue{}.AttributeTypes(ctx),
map[string]attr.Value{
"host": types.StringPointerValue(resp.ConnectionInfo.Write.Host),
"port": types.Int64PointerValue(resp.ConnectionInfo.Write.Port),
"host": types.StringValue(resp.ConnectionInfo.Write.Host),
"port": types.Int32Value(resp.ConnectionInfo.Write.Port),
},
),
},
@ -62,7 +60,7 @@ func mapGetInstanceResponseToModel(
m.InstanceId.ValueString(),
)
}
m.InstanceId = types.StringPointerValue(resp.Id)
m.InstanceId = types.StringValue(resp.Id)
m.IsDeletable = types.BoolValue(resp.GetIsDeletable())
@ -75,12 +73,12 @@ func mapGetInstanceResponseToModel(
netInstAdd := types.StringValue("")
if instAdd, ok := resp.Network.GetInstanceAddressOk(); ok {
netInstAdd = types.StringValue(instAdd)
netInstAdd = types.StringValue(*instAdd)
}
netRtrAdd := types.StringValue("")
if rtrAdd, ok := resp.Network.GetRouterAddressOk(); ok {
netRtrAdd = types.StringValue(rtrAdd)
netRtrAdd = types.StringValue(*rtrAdd)
}
net, diags := postgresflexalpharesource.NewNetworkValue(
@ -97,8 +95,8 @@ func mapGetInstanceResponseToModel(
}
m.Network = net
m.Replicas = types.Int64Value(int64(resp.GetReplicas()))
m.RetentionDays = types.Int64Value(resp.GetRetentionDays())
m.Replicas = types.Int32Value(int32(resp.GetReplicas()))
m.RetentionDays = types.Int32Value(resp.GetRetentionDays())
m.Name = types.StringValue(resp.GetName())
@ -108,7 +106,7 @@ func mapGetInstanceResponseToModel(
postgresflexalpharesource.StorageValue{}.AttributeTypes(ctx),
map[string]attr.Value{
"performance_class": types.StringValue(resp.Storage.GetPerformanceClass()),
"size": types.Int64Value(resp.Storage.GetSize()),
"size": types.Int32Value(resp.Storage.GetSize()),
},
)
if diags.HasError() {
@ -131,7 +129,7 @@ func mapGetDataInstanceResponseToModel(
m.FlavorId = types.StringValue(resp.GetFlavorId())
m.Id = utils.BuildInternalTerraformId(m.ProjectId.ValueString(), m.Region.ValueString(), m.InstanceId.ValueString())
m.InstanceId = types.StringPointerValue(resp.Id)
m.InstanceId = types.StringValue(resp.Id)
m.IsDeletable = types.BoolValue(resp.GetIsDeletable())
m.Name = types.StringValue(resp.GetName())
@ -140,14 +138,14 @@ func mapGetDataInstanceResponseToModel(
return err
}
m.Replicas = types.Int64Value(int64(resp.GetReplicas()))
m.RetentionDays = types.Int64Value(resp.GetRetentionDays())
m.Replicas = types.Int32Value(int32(resp.GetReplicas()))
m.RetentionDays = types.Int32Value(resp.GetRetentionDays())
m.Status = types.StringValue(string(resp.GetStatus()))
storage, diags := postgresflexalphadatasource.NewStorageValue(
postgresflexalphadatasource.StorageValue{}.AttributeTypes(ctx),
map[string]attr.Value{
"performance_class": types.StringValue(resp.Storage.GetPerformanceClass()),
"size": types.Int64Value(resp.Storage.GetSize()),
"size": types.Int32Value(resp.Storage.GetSize()),
},
)
if diags.HasError() {
@ -159,9 +157,7 @@ func mapGetDataInstanceResponseToModel(
}
func handleConnectionInfo(ctx context.Context, m *dataSourceModel, resp *postgresflex.GetInstanceResponse) {
isConnectionInfoIncomplete := resp.ConnectionInfo == nil || resp.ConnectionInfo.Write == nil ||
resp.ConnectionInfo.Write.Host == nil || *resp.ConnectionInfo.Write.Host == "" ||
resp.ConnectionInfo.Write.Port == nil || *resp.ConnectionInfo.Write.Port == 0
isConnectionInfoIncomplete := resp.ConnectionInfo.Write.Host == "" || resp.ConnectionInfo.Write.Port == 0
if isConnectionInfoIncomplete {
m.ConnectionInfo = postgresflexalphadatasource.NewConnectionInfoValueNull()
@ -172,8 +168,8 @@ func handleConnectionInfo(ctx context.Context, m *dataSourceModel, resp *postgre
"write": types.ObjectValueMust(
postgresflexalphadatasource.WriteValue{}.AttributeTypes(ctx),
map[string]attr.Value{
"host": types.StringPointerValue(resp.ConnectionInfo.Write.Host),
"port": types.Int64PointerValue(resp.ConnectionInfo.Write.Port),
"host": types.StringValue(resp.ConnectionInfo.Write.Host),
"port": types.Int32Value(resp.ConnectionInfo.Write.Port),
},
),
},
@ -189,12 +185,12 @@ func handleNetwork(ctx context.Context, m *dataSourceModel, resp *postgresflex.G
instAddr := ""
if iA, ok := resp.Network.GetInstanceAddressOk(); ok {
instAddr = iA
instAddr = *iA
}
rtrAddr := ""
if rA, ok := resp.Network.GetRouterAddressOk(); ok {
rtrAddr = rA
rtrAddr = *rA
}
net, diags := postgresflexalphadatasource.NewNetworkValue(
@ -216,22 +212,22 @@ func handleNetwork(ctx context.Context, m *dataSourceModel, resp *postgresflex.G
func handleEncryption(m *dataSourceModel, resp *postgresflex.GetInstanceResponse) {
keyId := ""
if keyIdVal, ok := resp.Encryption.GetKekKeyIdOk(); ok {
keyId = keyIdVal
keyId = *keyIdVal
}
keyRingId := ""
if keyRingIdVal, ok := resp.Encryption.GetKekKeyRingIdOk(); ok {
keyRingId = keyRingIdVal
keyRingId = *keyRingIdVal
}
keyVersion := ""
if keyVersionVal, ok := resp.Encryption.GetKekKeyVersionOk(); ok {
keyVersion = keyVersionVal
keyVersion = *keyVersionVal
}
svcAcc := ""
if svcAccVal, ok := resp.Encryption.GetServiceAccountOk(); ok {
svcAcc = svcAccVal
svcAcc = *svcAccVal
}
m.Encryption = postgresflexalphadatasource.EncryptionValue{

View file

@ -5,9 +5,9 @@ import (
"testing"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/stackitcloud/stackit-sdk-go/core/utils"
postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
postgresflex "github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
postgresflexalpharesource "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/instance/resources_gen"
utils2 "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
)
@ -17,7 +17,7 @@ func Test_handleConnectionInfo(t *testing.T) {
ctx context.Context
m *dataSourceModel
hostName string
port int64
port int32
}
tests := []struct {
name string
@ -63,10 +63,10 @@ func Test_handleConnectionInfo(t *testing.T) {
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
resp := &postgresflex.GetInstanceResponse{
ConnectionInfo: &postgresflex.InstanceConnectionInfo{
Write: &postgresflex.InstanceConnectionInfoWrite{
Host: utils.Ptr(tt.args.hostName),
Port: utils.Ptr(tt.args.port),
ConnectionInfo: postgresflex.InstanceConnectionInfo{
Write: postgresflex.InstanceConnectionInfoWrite{
Host: tt.args.hostName,
Port: tt.args.port,
},
},
}
@ -93,7 +93,7 @@ func Test_handleConnectionInfo(t *testing.T) {
if !ok {
t.Errorf("could not find a value for port in connection_info.write")
}
if !gotPort.Equal(types.Int64Value(tt.args.port)) {
if !gotPort.Equal(types.Int32Value(tt.args.port)) {
t.Errorf("port value incorrect: want: %d - got: %s", tt.args.port, gotPort.String())
}
}

View file

@ -4,7 +4,6 @@ import (
"context"
_ "embed"
"fmt"
"math"
"net/http"
"strings"
@ -14,8 +13,8 @@ import (
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-log/tflog"
"github.com/stackitcloud/stackit-sdk-go/core/oapierror"
"github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
postgresflexalpha "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/instance/resources_gen"
@ -50,7 +49,7 @@ type InstanceResourceIdentityModel struct {
// instanceResource is the resource implementation.
type instanceResource struct {
client *postgresflex.APIClient
client *v3alpha1api.APIClient
providerData core.ProviderData
}
@ -207,15 +206,11 @@ func (r *instanceResource) Create(
return
}
if model.Replicas.ValueInt64() > math.MaxInt32 {
resp.Diagnostics.AddError("invalid int32 value", "provided int64 value does not fit into int32")
return
}
replVal := int32(model.Replicas.ValueInt64()) // nolint:gosec // check is performed above
replVal := model.Replicas.ValueInt32() // nolint:gosec // check is performed above
payload := modelToCreateInstancePayload(netAcl, model, replVal)
// Create new instance
createResp, err := r.client.CreateInstanceRequest(
createResp, err := r.client.DefaultAPI.CreateInstanceRequest(
ctx,
projectId,
region,
@ -236,14 +231,14 @@ func (r *instanceResource) Create(
identity := InstanceResourceIdentityModel{
ProjectID: types.StringValue(projectId),
Region: types.StringValue(region),
InstanceID: types.StringValue(instanceId),
InstanceID: types.StringPointerValue(instanceId),
}
resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
if resp.Diagnostics.HasError() {
return
}
waitResp, err := wait.CreateInstanceWaitHandler(ctx, r.client, projectId, region, instanceId).
waitResp, err := wait.CreateInstanceWaitHandler(ctx, r.client.DefaultAPI, projectId, region, *instanceId).
WaitWithContext(ctx)
if err != nil {
core.LogAndAddError(
@ -279,34 +274,32 @@ func modelToCreateInstancePayload(
netAcl []string,
model postgresflexalpha.InstanceModel,
replVal int32,
) postgresflex.CreateInstanceRequestPayload {
var enc *postgresflex.InstanceEncryption
) v3alpha1api.CreateInstanceRequestPayload {
var enc *v3alpha1api.InstanceEncryption
if !model.Encryption.IsNull() && !model.Encryption.IsUnknown() {
enc = &postgresflex.InstanceEncryption{
KekKeyId: model.Encryption.KekKeyId.ValueStringPointer(),
KekKeyRingId: model.Encryption.KekKeyRingId.ValueStringPointer(),
KekKeyVersion: model.Encryption.KekKeyVersion.ValueStringPointer(),
ServiceAccount: model.Encryption.ServiceAccount.ValueStringPointer(),
enc = &v3alpha1api.InstanceEncryption{
KekKeyId: model.Encryption.KekKeyId.ValueString(),
KekKeyRingId: model.Encryption.KekKeyRingId.ValueString(),
KekKeyVersion: model.Encryption.KekKeyVersion.ValueString(),
ServiceAccount: model.Encryption.ServiceAccount.ValueString(),
}
}
payload := postgresflex.CreateInstanceRequestPayload{
BackupSchedule: model.BackupSchedule.ValueStringPointer(),
payload := v3alpha1api.CreateInstanceRequestPayload{
BackupSchedule: model.BackupSchedule.ValueString(),
Encryption: enc,
FlavorId: model.FlavorId.ValueStringPointer(),
Name: model.Name.ValueStringPointer(),
Network: &postgresflex.InstanceNetworkCreate{
AccessScope: postgresflex.InstanceNetworkGetAccessScopeAttributeType(
model.Network.AccessScope.ValueStringPointer(),
),
Acl: &netAcl,
FlavorId: model.FlavorId.ValueString(),
Name: model.Name.ValueString(),
Network: v3alpha1api.InstanceNetworkCreate{
AccessScope: (*v3alpha1api.InstanceNetworkAccessScope)(model.Network.AccessScope.ValueStringPointer()),
Acl: netAcl,
},
Replicas: postgresflex.CreateInstanceRequestPayloadGetReplicasAttributeType(&replVal),
RetentionDays: model.RetentionDays.ValueInt64Pointer(),
Storage: &postgresflex.StorageCreate{
PerformanceClass: model.Storage.PerformanceClass.ValueStringPointer(),
Size: model.Storage.Size.ValueInt64Pointer(),
Replicas: v3alpha1api.Replicas(replVal),
RetentionDays: model.RetentionDays.ValueInt32(),
Storage: v3alpha1api.StorageCreate{
PerformanceClass: model.Storage.PerformanceClass.ValueString(),
Size: model.Storage.Size.ValueInt32(),
},
Version: model.Version.ValueStringPointer(),
Version: model.Version.ValueString(),
}
return payload
}
@ -347,7 +340,7 @@ func (r *instanceResource) Read(
ctx = tflog.SetField(ctx, "instance_id", instanceId)
ctx = tflog.SetField(ctx, "region", region)
instanceResp, err := r.client.GetInstanceRequest(ctx, projectId, region, instanceId).Execute()
instanceResp, err := r.client.DefaultAPI.GetInstanceRequest(ctx, projectId, region, instanceId).Execute()
if err != nil {
oapiErr, ok := err.(*oapierror.GenericOpenAPIError) //nolint:errorlint //complaining that error.As should be used to catch wrapped errors, but this error should not be wrapped
if ok && oapiErr.StatusCode == http.StatusNotFound {
@ -366,7 +359,7 @@ func (r *instanceResource) Read(
return
}
if !model.InstanceId.IsUnknown() && !model.InstanceId.IsNull() {
if respInstanceID != instanceId {
if *respInstanceID != instanceId {
core.LogAndAddError(
ctx,
&resp.Diagnostics,
@ -445,29 +438,24 @@ func (r *instanceResource) Update(
return
}
if model.Replicas.ValueInt64() > math.MaxInt32 {
resp.Diagnostics.AddError("invalid int32 value", "provided int64 value does not fit into int32")
return
}
replInt32 := int32(model.Replicas.ValueInt64()) // nolint:gosec // check is performed above
payload := postgresflex.UpdateInstanceRequestPayload{
BackupSchedule: model.BackupSchedule.ValueStringPointer(),
FlavorId: model.FlavorId.ValueStringPointer(),
Name: model.Name.ValueStringPointer(),
Network: &postgresflex.InstanceNetworkUpdate{
Acl: &netAcl,
replInt32 := model.Replicas.ValueInt32()
payload := v3alpha1api.UpdateInstanceRequestPayload{
BackupSchedule: model.BackupSchedule.ValueString(),
FlavorId: model.FlavorId.ValueString(),
Name: model.Name.ValueString(),
Network: v3alpha1api.InstanceNetworkUpdate{
Acl: netAcl,
},
Replicas: postgresflex.UpdateInstanceRequestPayloadGetReplicasAttributeType(&replInt32),
RetentionDays: model.RetentionDays.ValueInt64Pointer(),
Storage: &postgresflex.StorageUpdate{
Size: model.Storage.Size.ValueInt64Pointer(),
Replicas: v3alpha1api.Replicas(replInt32),
RetentionDays: model.RetentionDays.ValueInt32(),
Storage: v3alpha1api.StorageUpdate{
Size: model.Storage.Size.ValueInt32Pointer(),
},
Version: model.Version.ValueStringPointer(),
Version: model.Version.ValueString(),
}
// Update existing instance
err := r.client.UpdateInstanceRequest(
err := r.client.DefaultAPI.UpdateInstanceRequest(
ctx,
projectId,
region,
@ -482,7 +470,7 @@ func (r *instanceResource) Update(
waitResp, err := wait.PartialUpdateInstanceWaitHandler(
ctx,
r.client,
r.client.DefaultAPI,
projectId,
region,
instanceId,
@ -540,7 +528,7 @@ func (r *instanceResource) Delete(
ctx = tflog.SetField(ctx, "region", region)
// Delete existing instance
err := r.client.DeleteInstanceRequest(ctx, projectId, region, instanceId).Execute()
err := r.client.DefaultAPI.DeleteInstanceRequest(ctx, projectId, region, instanceId).Execute()
if err != nil {
core.LogAndAddError(ctx, &resp.Diagnostics, "Error deleting instance", fmt.Sprintf("Calling API: %v", err))
return
@ -548,7 +536,7 @@ func (r *instanceResource) Delete(
ctx = core.LogResponse(ctx)
_, err = r.client.GetInstanceRequest(ctx, projectId, region, instanceId).Execute()
_, err = r.client.DefaultAPI.GetInstanceRequest(ctx, projectId, region, instanceId).Execute()
if err != nil {
oapiErr, ok := err.(*oapierror.GenericOpenAPIError) //nolint:errorlint //complaining that error.As should be used to catch wrapped errors, but this error should not be wrapped
if ok && oapiErr.StatusCode != http.StatusNotFound {

View file

@ -5,7 +5,7 @@ package postgresflexalpha
import (
"context"
"fmt"
"github.com/hashicorp/terraform-plugin-framework-validators/int64validator"
"github.com/hashicorp/terraform-plugin-framework-validators/int32validator"
"github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/attr"
"github.com/hashicorp/terraform-plugin-framework/diag"
@ -42,7 +42,7 @@ func InstanceResourceSchema(ctx context.Context) schema.Schema {
Description: "The host of the instance.",
MarkdownDescription: "The host of the instance.",
},
"port": schema.Int64Attribute{
"port": schema.Int32Attribute{
Computed: true,
Description: "The port of the instance.",
MarkdownDescription: "The port of the instance.",
@ -178,18 +178,18 @@ func InstanceResourceSchema(ctx context.Context) schema.Schema {
),
},
},
"replicas": schema.Int64Attribute{
"replicas": schema.Int32Attribute{
Required: true,
Description: "How many replicas the instance should have.",
MarkdownDescription: "How many replicas the instance should have.",
Validators: []validator.Int64{
int64validator.OneOf(
Validators: []validator.Int32{
int32validator.OneOf(
1,
3,
),
},
},
"retention_days": schema.Int64Attribute{
"retention_days": schema.Int32Attribute{
Required: true,
Description: "How long backups are retained. The value can only be between 32 and 365 days.",
MarkdownDescription: "How long backups are retained. The value can only be between 32 and 365 days.",
@ -206,7 +206,7 @@ func InstanceResourceSchema(ctx context.Context) schema.Schema {
Description: "The storage class for the storage.",
MarkdownDescription: "The storage class for the storage.",
},
"size": schema.Int64Attribute{
"size": schema.Int32Attribute{
Required: true,
Description: "The storage size in Gigabytes.",
MarkdownDescription: "The storage size in Gigabytes.",
@ -243,8 +243,8 @@ type InstanceModel struct {
Network NetworkValue `tfsdk:"network"`
ProjectId types.String `tfsdk:"project_id"`
Region types.String `tfsdk:"region"`
Replicas types.Int64 `tfsdk:"replicas"`
RetentionDays types.Int64 `tfsdk:"retention_days"`
Replicas types.Int32 `tfsdk:"replicas"`
RetentionDays types.Int32 `tfsdk:"retention_days"`
Status types.String `tfsdk:"status"`
Storage StorageValue `tfsdk:"storage"`
Version types.String `tfsdk:"version"`
@ -654,12 +654,12 @@ func (t WriteType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue
return nil, diags
}
portVal, ok := portAttribute.(basetypes.Int64Value)
portVal, ok := portAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`port expected to be basetypes.Int64Value, was: %T`, portAttribute))
fmt.Sprintf(`port expected to be basetypes.Int32Value, was: %T`, portAttribute))
}
if diags.HasError() {
@ -764,12 +764,12 @@ func NewWriteValue(attributeTypes map[string]attr.Type, attributes map[string]at
return NewWriteValueUnknown(), diags
}
portVal, ok := portAttribute.(basetypes.Int64Value)
portVal, ok := portAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`port expected to be basetypes.Int64Value, was: %T`, portAttribute))
fmt.Sprintf(`port expected to be basetypes.Int32Value, was: %T`, portAttribute))
}
if diags.HasError() {
@ -852,7 +852,7 @@ var _ basetypes.ObjectValuable = WriteValue{}
type WriteValue struct {
Host basetypes.StringValue `tfsdk:"host"`
Port basetypes.Int64Value `tfsdk:"port"`
Port basetypes.Int32Value `tfsdk:"port"`
state attr.ValueState
}
@ -863,7 +863,7 @@ func (v WriteValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error)
var err error
attrTypes["host"] = basetypes.StringType{}.TerraformType(ctx)
attrTypes["port"] = basetypes.Int64Type{}.TerraformType(ctx)
attrTypes["port"] = basetypes.Int32Type{}.TerraformType(ctx)
objectType := tftypes.Object{AttributeTypes: attrTypes}
@ -918,7 +918,7 @@ func (v WriteValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, d
attributeTypes := map[string]attr.Type{
"host": basetypes.StringType{},
"port": basetypes.Int64Type{},
"port": basetypes.Int32Type{},
}
if v.IsNull() {
@ -976,7 +976,7 @@ func (v WriteValue) Type(ctx context.Context) attr.Type {
func (v WriteValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
return map[string]attr.Type{
"host": basetypes.StringType{},
"port": basetypes.Int64Type{},
"port": basetypes.Int32Type{},
}
}
@ -2040,12 +2040,12 @@ func (t StorageType) ValueFromObject(ctx context.Context, in basetypes.ObjectVal
return nil, diags
}
sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
sizeVal, ok := sizeAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
fmt.Sprintf(`size expected to be basetypes.Int32Value, was: %T`, sizeAttribute))
}
if diags.HasError() {
@ -2150,12 +2150,12 @@ func NewStorageValue(attributeTypes map[string]attr.Type, attributes map[string]
return NewStorageValueUnknown(), diags
}
sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
sizeVal, ok := sizeAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
fmt.Sprintf(`size expected to be basetypes.Int32Value, was: %T`, sizeAttribute))
}
if diags.HasError() {
@ -2238,7 +2238,7 @@ var _ basetypes.ObjectValuable = StorageValue{}
type StorageValue struct {
PerformanceClass basetypes.StringValue `tfsdk:"performance_class"`
Size basetypes.Int64Value `tfsdk:"size"`
Size basetypes.Int32Value `tfsdk:"size"`
state attr.ValueState
}
@ -2249,7 +2249,7 @@ func (v StorageValue) ToTerraformValue(ctx context.Context) (tftypes.Value, erro
var err error
attrTypes["performance_class"] = basetypes.StringType{}.TerraformType(ctx)
attrTypes["size"] = basetypes.Int64Type{}.TerraformType(ctx)
attrTypes["size"] = basetypes.Int32Type{}.TerraformType(ctx)
objectType := tftypes.Object{AttributeTypes: attrTypes}
@ -2304,7 +2304,7 @@ func (v StorageValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue,
attributeTypes := map[string]attr.Type{
"performance_class": basetypes.StringType{},
"size": basetypes.Int64Type{},
"size": basetypes.Int32Type{},
}
if v.IsNull() {
@ -2362,6 +2362,6 @@ func (v StorageValue) Type(ctx context.Context) attr.Type {
func (v StorageValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
return map[string]attr.Type{
"performance_class": basetypes.StringType{},
"size": basetypes.Int64Type{},
"size": basetypes.Int32Type{},
}
}

View file

@ -17,8 +17,8 @@ import (
"github.com/hashicorp/terraform-plugin-testing/terraform"
"github.com/stackitcloud/stackit-sdk-go/core/config"
"github.com/stackitcloud/stackit-sdk-go/core/utils"
"github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
postgresflexalphaPkgGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
postgresflexalphaInstance "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/instance"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/wait/postgresflexalpha"
@ -33,7 +33,7 @@ import (
const pfx = "stackitprivatepreview_postgresflexalpha"
func TestInstanceResourceSchema(t *testing.T) {
t.Parallel()
// t.Parallel()
ctx := context.Background()
schemaRequest := fwresource.SchemaRequest{}
@ -142,6 +142,7 @@ func TestAccInstance(t *testing.T) {
updSizeData.Size = 25
updBackupSched := updSizeData
// api should complain about more than one daily backup
updBackupSched.BackupSchedule = "30 3 * * *"
/*
@ -1139,7 +1140,7 @@ func testAccCheckPostgresFlexDestroy(s *terraform.State) error {
}
ctx := context.Background()
var client *postgresflexalphaPkgGen.APIClient
var client *v3alpha1api.APIClient
var err error
var region, projectID string
@ -1163,7 +1164,7 @@ func testAccCheckPostgresFlexDestroy(s *terraform.State) error {
config.WithEndpoint(testutils.PostgresFlexCustomEndpoint),
)
}
client, err = postgresflexalphaPkgGen.NewAPIClient(apiClientConfigOptions...)
client, err = v3alpha1api.NewAPIClient(apiClientConfigOptions...)
if err != nil {
log.Fatalln(err)
}
@ -1180,7 +1181,7 @@ func testAccCheckPostgresFlexDestroy(s *terraform.State) error {
instancesToDestroy = append(instancesToDestroy, instanceID)
}
instancesResp, err := client.ListInstancesRequest(ctx, projectID, region).
instancesResp, err := client.DefaultAPI.ListInstancesRequest(ctx, projectID, region).
Size(100).
Execute()
if err != nil {
@ -1189,25 +1190,25 @@ func testAccCheckPostgresFlexDestroy(s *terraform.State) error {
items := instancesResp.GetInstances()
for i := range items {
if items[i].Id == nil {
if items[i].Id == "" {
continue
}
if utils.Contains(instancesToDestroy, *items[i].Id) {
err := client.DeleteInstanceRequestExecute(ctx, testutils.ProjectId, region, *items[i].Id)
if utils.Contains(instancesToDestroy, items[i].Id) {
err := client.DefaultAPI.DeleteInstanceRequest(ctx, testutils.ProjectId, region, items[i].Id).Execute()
if err != nil {
return fmt.Errorf("deleting instance %s during CheckDestroy: %w", *items[i].Id, err)
return fmt.Errorf("deleting instance %s during CheckDestroy: %w", items[i].Id, err)
}
err = postgresflexalpha.DeleteInstanceWaitHandler(
ctx,
client,
client.DefaultAPI,
testutils.ProjectId,
testutils.Region,
*items[i].Id,
items[i].Id,
15*time.Minute,
10*time.Second,
)
if err != nil {
return fmt.Errorf("deleting instance %s during CheckDestroy: waiting for deletion %w", *items[i].Id, err)
return fmt.Errorf("deleting instance %s during CheckDestroy: waiting for deletion %w", items[i].Id, err)
}
}
}

View file

@ -8,8 +8,8 @@ import (
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
"github.com/hashicorp/terraform-plugin-framework/diag"
"github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
postgresflexalpha "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/user/datasources_gen"
postgresflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/utils"
@ -40,7 +40,7 @@ type dataSourceModel struct {
// userDataSource is the data source implementation.
type userDataSource struct {
client *postgresflex.APIClient
client *v3alpha1api.APIClient
providerData core.ProviderData
}
@ -103,7 +103,7 @@ func (r *userDataSource) Read(
projectId := model.ProjectId.ValueString()
instanceId := model.InstanceId.ValueString()
userId64 := model.UserId.ValueInt64()
userId64 := model.UserId.ValueInt32()
if userId64 > math.MaxInt32 {
core.LogAndAddError(ctx, &resp.Diagnostics, "Error in type conversion", "int value too large (userId)")
return
@ -116,7 +116,7 @@ func (r *userDataSource) Read(
ctx = tflog.SetField(ctx, "region", region)
ctx = tflog.SetField(ctx, "user_id", userId)
recordSetResp, err := r.client.GetUserRequest(ctx, projectId, region, instanceId, userId).Execute()
recordSetResp, err := r.client.DefaultAPI.GetUserRequest(ctx, projectId, region, instanceId, userId).Execute()
if err != nil {
handleReadError(ctx, &diags, err, projectId, instanceId, userId)
resp.State.RemoveResource(ctx)

View file

@ -14,7 +14,7 @@ import (
func UserDataSourceSchema(ctx context.Context) schema.Schema {
return schema.Schema{
Attributes: map[string]schema.Attribute{
"tf_original_api_id": schema.Int64Attribute{
"tf_original_api_id": schema.Int32Attribute{
Computed: true,
Description: "The ID of the user.",
MarkdownDescription: "The ID of the user.",
@ -55,7 +55,7 @@ func UserDataSourceSchema(ctx context.Context) schema.Schema {
Description: "The current status of the user.",
MarkdownDescription: "The current status of the user.",
},
"user_id": schema.Int64Attribute{
"user_id": schema.Int32Attribute{
Required: true,
Description: "The ID of the user.",
MarkdownDescription: "The ID of the user.",
@ -65,12 +65,12 @@ func UserDataSourceSchema(ctx context.Context) schema.Schema {
}
type UserModel struct {
Id types.Int64 `tfsdk:"tf_original_api_id"`
Id types.Int32 `tfsdk:"tf_original_api_id"`
InstanceId types.String `tfsdk:"instance_id"`
Name types.String `tfsdk:"name"`
ProjectId types.String `tfsdk:"project_id"`
Region types.String `tfsdk:"region"`
Roles types.List `tfsdk:"roles"`
Status types.String `tfsdk:"status"`
UserId types.Int64 `tfsdk:"user_id"`
UserId types.Int32 `tfsdk:"user_id"`
}

View file

@ -25,7 +25,7 @@ func UsersDataSourceSchema(ctx context.Context) schema.Schema {
Description: "The ID of the instance.",
MarkdownDescription: "The ID of the instance.",
},
"page": schema.Int64Attribute{
"page": schema.Int32Attribute{
Optional: true,
Computed: true,
Description: "Number of the page of items list to be returned.",
@ -33,19 +33,19 @@ func UsersDataSourceSchema(ctx context.Context) schema.Schema {
},
"pagination": schema.SingleNestedAttribute{
Attributes: map[string]schema.Attribute{
"page": schema.Int64Attribute{
"page": schema.Int32Attribute{
Computed: true,
},
"size": schema.Int64Attribute{
"size": schema.Int32Attribute{
Computed: true,
},
"sort": schema.StringAttribute{
Computed: true,
},
"total_pages": schema.Int64Attribute{
"total_pages": schema.Int32Attribute{
Computed: true,
},
"total_rows": schema.Int64Attribute{
"total_rows": schema.Int32Attribute{
Computed: true,
},
},
@ -71,7 +71,7 @@ func UsersDataSourceSchema(ctx context.Context) schema.Schema {
),
},
},
"size": schema.Int64Attribute{
"size": schema.Int32Attribute{
Optional: true,
Computed: true,
Description: "Number of items to be returned on each page.",
@ -96,7 +96,7 @@ func UsersDataSourceSchema(ctx context.Context) schema.Schema {
"users": schema.ListNestedAttribute{
NestedObject: schema.NestedAttributeObject{
Attributes: map[string]schema.Attribute{
"id": schema.Int64Attribute{
"id": schema.Int32Attribute{
Computed: true,
Description: "The ID of the user.",
MarkdownDescription: "The ID of the user.",
@ -128,11 +128,11 @@ func UsersDataSourceSchema(ctx context.Context) schema.Schema {
type UsersModel struct {
InstanceId types.String `tfsdk:"instance_id"`
Page types.Int64 `tfsdk:"page"`
Page types.Int32 `tfsdk:"page"`
Pagination PaginationValue `tfsdk:"pagination"`
ProjectId types.String `tfsdk:"project_id"`
Region types.String `tfsdk:"region"`
Size types.Int64 `tfsdk:"size"`
Size types.Int32 `tfsdk:"size"`
Sort types.String `tfsdk:"sort"`
Users types.List `tfsdk:"users"`
}
@ -172,12 +172,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
return nil, diags
}
pageVal, ok := pageAttribute.(basetypes.Int64Value)
pageVal, ok := pageAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute))
fmt.Sprintf(`page expected to be basetypes.Int32Value, was: %T`, pageAttribute))
}
sizeAttribute, ok := attributes["size"]
@ -190,12 +190,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
return nil, diags
}
sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
sizeVal, ok := sizeAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
fmt.Sprintf(`size expected to be basetypes.Int32Value, was: %T`, sizeAttribute))
}
sortAttribute, ok := attributes["sort"]
@ -226,12 +226,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
return nil, diags
}
totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value)
totalPagesVal, ok := totalPagesAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute))
fmt.Sprintf(`total_pages expected to be basetypes.Int32Value, was: %T`, totalPagesAttribute))
}
totalRowsAttribute, ok := attributes["total_rows"]
@ -244,12 +244,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
return nil, diags
}
totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value)
totalRowsVal, ok := totalRowsAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute))
fmt.Sprintf(`total_rows expected to be basetypes.Int32Value, was: %T`, totalRowsAttribute))
}
if diags.HasError() {
@ -339,12 +339,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
return NewPaginationValueUnknown(), diags
}
pageVal, ok := pageAttribute.(basetypes.Int64Value)
pageVal, ok := pageAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute))
fmt.Sprintf(`page expected to be basetypes.Int32Value, was: %T`, pageAttribute))
}
sizeAttribute, ok := attributes["size"]
@ -357,12 +357,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
return NewPaginationValueUnknown(), diags
}
sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
sizeVal, ok := sizeAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
fmt.Sprintf(`size expected to be basetypes.Int32Value, was: %T`, sizeAttribute))
}
sortAttribute, ok := attributes["sort"]
@ -393,12 +393,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
return NewPaginationValueUnknown(), diags
}
totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value)
totalPagesVal, ok := totalPagesAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute))
fmt.Sprintf(`total_pages expected to be basetypes.Int32Value, was: %T`, totalPagesAttribute))
}
totalRowsAttribute, ok := attributes["total_rows"]
@ -411,12 +411,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
return NewPaginationValueUnknown(), diags
}
totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value)
totalRowsVal, ok := totalRowsAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute))
fmt.Sprintf(`total_rows expected to be basetypes.Int32Value, was: %T`, totalRowsAttribute))
}
if diags.HasError() {
@ -501,11 +501,11 @@ func (t PaginationType) ValueType(ctx context.Context) attr.Value {
var _ basetypes.ObjectValuable = PaginationValue{}
type PaginationValue struct {
Page basetypes.Int64Value `tfsdk:"page"`
Size basetypes.Int64Value `tfsdk:"size"`
Page basetypes.Int32Value `tfsdk:"page"`
Size basetypes.Int32Value `tfsdk:"size"`
Sort basetypes.StringValue `tfsdk:"sort"`
TotalPages basetypes.Int64Value `tfsdk:"total_pages"`
TotalRows basetypes.Int64Value `tfsdk:"total_rows"`
TotalPages basetypes.Int32Value `tfsdk:"total_pages"`
TotalRows basetypes.Int32Value `tfsdk:"total_rows"`
state attr.ValueState
}
@ -515,11 +515,11 @@ func (v PaginationValue) ToTerraformValue(ctx context.Context) (tftypes.Value, e
var val tftypes.Value
var err error
attrTypes["page"] = basetypes.Int64Type{}.TerraformType(ctx)
attrTypes["size"] = basetypes.Int64Type{}.TerraformType(ctx)
attrTypes["page"] = basetypes.Int32Type{}.TerraformType(ctx)
attrTypes["size"] = basetypes.Int32Type{}.TerraformType(ctx)
attrTypes["sort"] = basetypes.StringType{}.TerraformType(ctx)
attrTypes["total_pages"] = basetypes.Int64Type{}.TerraformType(ctx)
attrTypes["total_rows"] = basetypes.Int64Type{}.TerraformType(ctx)
attrTypes["total_pages"] = basetypes.Int32Type{}.TerraformType(ctx)
attrTypes["total_rows"] = basetypes.Int32Type{}.TerraformType(ctx)
objectType := tftypes.Object{AttributeTypes: attrTypes}
@ -597,11 +597,11 @@ func (v PaginationValue) ToObjectValue(ctx context.Context) (basetypes.ObjectVal
var diags diag.Diagnostics
attributeTypes := map[string]attr.Type{
"page": basetypes.Int64Type{},
"size": basetypes.Int64Type{},
"page": basetypes.Int32Type{},
"size": basetypes.Int32Type{},
"sort": basetypes.StringType{},
"total_pages": basetypes.Int64Type{},
"total_rows": basetypes.Int64Type{},
"total_pages": basetypes.Int32Type{},
"total_rows": basetypes.Int32Type{},
}
if v.IsNull() {
@ -673,11 +673,11 @@ func (v PaginationValue) Type(ctx context.Context) attr.Type {
func (v PaginationValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
return map[string]attr.Type{
"page": basetypes.Int64Type{},
"size": basetypes.Int64Type{},
"page": basetypes.Int32Type{},
"size": basetypes.Int32Type{},
"sort": basetypes.StringType{},
"total_pages": basetypes.Int64Type{},
"total_rows": basetypes.Int64Type{},
"total_pages": basetypes.Int32Type{},
"total_rows": basetypes.Int32Type{},
}
}
@ -716,12 +716,12 @@ func (t UsersType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue
return nil, diags
}
idVal, ok := idAttribute.(basetypes.Int64Value)
idVal, ok := idAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`id expected to be basetypes.Int64Value, was: %T`, idAttribute))
fmt.Sprintf(`id expected to be basetypes.Int32Value, was: %T`, idAttribute))
}
nameAttribute, ok := attributes["name"]
@ -845,12 +845,12 @@ func NewUsersValue(attributeTypes map[string]attr.Type, attributes map[string]at
return NewUsersValueUnknown(), diags
}
idVal, ok := idAttribute.(basetypes.Int64Value)
idVal, ok := idAttribute.(basetypes.Int32Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`id expected to be basetypes.Int64Value, was: %T`, idAttribute))
fmt.Sprintf(`id expected to be basetypes.Int32Value, was: %T`, idAttribute))
}
nameAttribute, ok := attributes["name"]
@ -969,7 +969,7 @@ func (t UsersType) ValueType(ctx context.Context) attr.Value {
var _ basetypes.ObjectValuable = UsersValue{}
type UsersValue struct {
Id basetypes.Int64Value `tfsdk:"id"`
Id basetypes.Int32Value `tfsdk:"id"`
Name basetypes.StringValue `tfsdk:"name"`
Status basetypes.StringValue `tfsdk:"status"`
state attr.ValueState
@ -981,7 +981,7 @@ func (v UsersValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error)
var val tftypes.Value
var err error
attrTypes["id"] = basetypes.Int64Type{}.TerraformType(ctx)
attrTypes["id"] = basetypes.Int32Type{}.TerraformType(ctx)
attrTypes["name"] = basetypes.StringType{}.TerraformType(ctx)
attrTypes["status"] = basetypes.StringType{}.TerraformType(ctx)
@ -1045,7 +1045,7 @@ func (v UsersValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, d
var diags diag.Diagnostics
attributeTypes := map[string]attr.Type{
"id": basetypes.Int64Type{},
"id": basetypes.Int32Type{},
"name": basetypes.StringType{},
"status": basetypes.StringType{},
}
@ -1109,7 +1109,7 @@ func (v UsersValue) Type(ctx context.Context) attr.Type {
func (v UsersValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
return map[string]attr.Type{
"id": basetypes.Int64Type{},
"id": basetypes.Int32Type{},
"name": basetypes.StringType{},
"status": basetypes.StringType{},
}

View file

@ -2,18 +2,17 @@ package postgresflexalpha
import (
"fmt"
"strconv"
"github.com/hashicorp/terraform-plugin-framework/attr"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
)
// mapDataSourceFields maps API response to data source model, preserving existing ID.
func mapDataSourceFields(userResp *postgresflex.GetUserResponse, model *dataSourceModel, region string) error {
func mapDataSourceFields(userResp *v3alpha1api.GetUserResponse, model *dataSourceModel, region string) error {
if userResp == nil {
return fmt.Errorf("response is nil")
}
@ -22,27 +21,25 @@ func mapDataSourceFields(userResp *postgresflex.GetUserResponse, model *dataSour
}
user := userResp
var userId int64
if model.UserId.ValueInt64() != 0 {
userId = model.UserId.ValueInt64()
} else if user.Id != nil {
userId = *user.Id
var userId int32
if model.UserId.ValueInt32() != 0 {
userId = model.UserId.ValueInt32()
} else {
return fmt.Errorf("user id not present")
}
model.TerraformID = utils.BuildInternalTerraformId(
model.ProjectId.ValueString(), region, model.InstanceId.ValueString(), strconv.FormatInt(userId, 10),
model.ProjectId.ValueString(), region, model.InstanceId.ValueString(), string(userId),
)
model.UserId = types.Int64Value(userId)
model.UserId = types.Int32Value(userId)
model.Name = types.StringValue(user.GetName())
if user.Roles == nil {
model.Roles = types.List(types.SetNull(types.StringType))
} else {
var roles []attr.Value
for _, role := range *user.Roles {
for _, role := range user.Roles {
roles = append(roles, types.StringValue(string(role)))
}
rolesSet, diags := types.SetValue(types.StringType, roles)
@ -52,24 +49,24 @@ func mapDataSourceFields(userResp *postgresflex.GetUserResponse, model *dataSour
model.Roles = types.List(rolesSet)
}
model.Id = types.Int64Value(userId)
model.Id = types.Int32Value(userId)
model.Region = types.StringValue(region)
model.Status = types.StringValue(user.GetStatus())
return nil
}
// toPayloadRoles converts a string slice to the API's role type.
func toPayloadRoles(roles *[]string) *[]postgresflex.UserRole {
var userRoles = make([]postgresflex.UserRole, 0, len(*roles))
for _, role := range *roles {
userRoles = append(userRoles, postgresflex.UserRole(role))
func toPayloadRoles(roles []string) []v3alpha1api.UserRole {
var userRoles = make([]v3alpha1api.UserRole, 0, len(roles))
for _, role := range roles {
userRoles = append(userRoles, v3alpha1api.UserRole(role))
}
return &userRoles
return userRoles
}
// toUpdatePayload creates an API update payload from the resource model.
func toUpdatePayload(model *resourceModel, roles *[]string) (
*postgresflex.UpdateUserRequestPayload,
func toUpdatePayload(model *resourceModel, roles []string) (
*v3alpha1api.UpdateUserRequestPayload,
error,
) {
if model == nil {
@ -79,14 +76,14 @@ func toUpdatePayload(model *resourceModel, roles *[]string) (
return nil, fmt.Errorf("nil roles")
}
return &postgresflex.UpdateUserRequestPayload{
return &v3alpha1api.UpdateUserRequestPayload{
Name: model.Name.ValueStringPointer(),
Roles: toPayloadRoles(roles),
}, nil
}
// toCreatePayload creates an API create payload from the resource model.
func toCreatePayload(model *resourceModel, roles *[]string) (*postgresflex.CreateUserRequestPayload, error) {
func toCreatePayload(model *resourceModel, roles []string) (*v3alpha1api.CreateUserRequestPayload, error) {
if model == nil {
return nil, fmt.Errorf("nil model")
}
@ -94,14 +91,14 @@ func toCreatePayload(model *resourceModel, roles *[]string) (*postgresflex.Creat
return nil, fmt.Errorf("nil roles")
}
return &postgresflex.CreateUserRequestPayload{
return &v3alpha1api.CreateUserRequestPayload{
Roles: toPayloadRoles(roles),
Name: model.Name.ValueStringPointer(),
Name: model.Name.ValueString(),
}, nil
}
// mapResourceFields maps API response to the resource model, preserving existing ID.
func mapResourceFields(userResp *postgresflex.GetUserResponse, model *resourceModel, region string) error {
func mapResourceFields(userResp *v3alpha1api.GetUserResponse, model *resourceModel, region string) error {
if userResp == nil {
return fmt.Errorf("response is nil")
}
@ -110,24 +107,24 @@ func mapResourceFields(userResp *postgresflex.GetUserResponse, model *resourceMo
}
user := userResp
var userId int64
if !model.UserId.IsNull() && !model.UserId.IsUnknown() && model.UserId.ValueInt64() != 0 {
userId = model.UserId.ValueInt64()
} else if user.Id != nil {
userId = *user.Id
var userId int32
if !model.UserId.IsNull() && !model.UserId.IsUnknown() && model.UserId.ValueInt32() != 0 {
userId = model.UserId.ValueInt32()
} else if user.Id != 0 {
userId = user.Id
} else {
return fmt.Errorf("user id not present")
}
model.Id = types.Int64Value(userId)
model.UserId = types.Int64Value(userId)
model.Name = types.StringPointerValue(user.Name)
model.Id = types.Int32Value(userId)
model.UserId = types.Int32Value(userId)
model.Name = types.StringValue(user.Name)
if user.Roles == nil {
model.Roles = types.List(types.SetNull(types.StringType))
} else {
var roles []attr.Value
for _, role := range *user.Roles {
for _, role := range user.Roles {
roles = append(roles, types.StringValue(string(role)))
}
rolesSet, diags := types.SetValue(types.StringType, roles)
@ -137,6 +134,6 @@ func mapResourceFields(userResp *postgresflex.GetUserResponse, model *resourceMo
model.Roles = types.List(rolesSet)
}
model.Region = types.StringValue(region)
model.Status = types.StringPointerValue(user.Status)
model.Status = types.StringValue(user.Status)
return nil
}

View file

@ -8,7 +8,8 @@ import (
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/stackitcloud/stackit-sdk-go/core/utils"
postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
"github.com/stackitcloud/stackit-sdk-go/services/postgresflex"
data "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/user/datasources_gen"
)
@ -27,8 +28,8 @@ func TestMapDataSourceFields(t *testing.T) {
testRegion,
dataSourceModel{
UserModel: data.UserModel{
Id: types.Int64Value(1),
UserId: types.Int64Value(1),
Id: types.Int32Value(1),
UserId: types.Int32Value(1),
InstanceId: types.StringValue("iid"),
ProjectId: types.StringValue("pid"),
Name: types.StringValue(""),
@ -53,8 +54,8 @@ func TestMapDataSourceFields(t *testing.T) {
testRegion,
dataSourceModel{
UserModel: data.UserModel{
Id: types.Int64Value(1),
UserId: types.Int64Value(1),
Id: types.Int32Value(1),
UserId: types.Int32Value(1),
InstanceId: types.StringValue("iid"),
ProjectId: types.StringValue("pid"),
Name: types.StringValue("username"),
@ -77,7 +78,7 @@ func TestMapDataSourceFields(t *testing.T) {
{
"null_fields_and_int_conversions",
&postgresflex.GetUserResponse{
Id: utils.Ptr(int64(1)),
Id: utils.Ptr(int32(1)),
Roles: &[]postgresflex.UserRole{},
Name: nil,
Status: utils.Ptr("status"),
@ -85,8 +86,8 @@ func TestMapDataSourceFields(t *testing.T) {
testRegion,
dataSourceModel{
UserModel: data.UserModel{
Id: types.Int64Value(1),
UserId: types.Int64Value(1),
Id: types.Int32Value(1),
UserId: types.Int32Value(1),
InstanceId: types.StringValue("iid"),
ProjectId: types.StringValue("pid"),
Name: types.StringValue(""),
@ -160,12 +161,12 @@ func TestMapFieldsCreate(t *testing.T) {
{
"default_values",
&postgresflex.GetUserResponse{
Id: utils.Ptr(int64(1)),
Id: utils.Ptr(Int32(1)),
},
testRegion,
resourceModel{
Id: types.Int64Value(1),
UserId: types.Int64Value(1),
Id: types.Int32Value(1),
UserId: types.Int32Value(1),
InstanceId: types.StringValue("iid"),
ProjectId: types.StringValue("pid"),
Name: types.StringNull(),
@ -180,14 +181,14 @@ func TestMapFieldsCreate(t *testing.T) {
{
"simple_values",
&postgresflex.GetUserResponse{
Id: utils.Ptr(int64(1)),
Id: utils.Ptr(Int32(1)),
Name: utils.Ptr("username"),
Status: utils.Ptr("status"),
},
testRegion,
resourceModel{
Id: types.Int64Value(1),
UserId: types.Int64Value(1),
Id: types.Int32Value(1),
UserId: types.Int32Value(1),
InstanceId: types.StringValue("iid"),
ProjectId: types.StringValue("pid"),
Name: types.StringValue("username"),
@ -202,14 +203,14 @@ func TestMapFieldsCreate(t *testing.T) {
{
"null_fields_and_int_conversions",
&postgresflex.GetUserResponse{
Id: utils.Ptr(int64(1)),
Id: utils.Ptr(Int32(1)),
Name: nil,
Status: nil,
},
testRegion,
resourceModel{
Id: types.Int64Value(1),
UserId: types.Int64Value(1),
Id: types.Int32Value(1),
UserId: types.Int32Value(1),
InstanceId: types.StringValue("iid"),
ProjectId: types.StringValue("pid"),
Name: types.StringNull(),
@ -281,12 +282,12 @@ func TestMapFields(t *testing.T) {
{
"default_values",
&postgresflex.GetUserResponse{
Id: utils.Ptr(int64(1)),
Id: utils.Ptr(Int32(1)),
},
testRegion,
resourceModel{
Id: types.Int64Value(1),
UserId: types.Int64Value(int64(1)),
Id: types.Int32Value(1),
UserId: types.Int32Value(Int32(1)),
InstanceId: types.StringValue("iid"),
ProjectId: types.StringValue("pid"),
Name: types.StringNull(),
@ -300,7 +301,7 @@ func TestMapFields(t *testing.T) {
{
"simple_values",
&postgresflex.GetUserResponse{
Id: utils.Ptr(int64(1)),
Id: utils.Ptr(Int32(1)),
Roles: &[]postgresflex.UserRole{
"role_1",
"role_2",
@ -310,8 +311,8 @@ func TestMapFields(t *testing.T) {
},
testRegion,
resourceModel{
Id: types.Int64Value(1),
UserId: types.Int64Value(1),
Id: types.Int32Value(1),
UserId: types.Int32Value(1),
InstanceId: types.StringValue("iid"),
ProjectId: types.StringValue("pid"),
Name: types.StringValue("username"),
@ -333,13 +334,13 @@ func TestMapFields(t *testing.T) {
{
"null_fields_and_int_conversions",
&postgresflex.GetUserResponse{
Id: utils.Ptr(int64(1)),
Id: utils.Ptr(Int32(1)),
Name: nil,
},
testRegion,
resourceModel{
Id: types.Int64Value(1),
UserId: types.Int64Value(1),
Id: types.Int32Value(1),
UserId: types.Int32Value(1),
InstanceId: types.StringValue("iid"),
ProjectId: types.StringValue("pid"),
Name: types.StringNull(),

View file

@ -12,8 +12,8 @@ import (
"github.com/hashicorp/terraform-plugin-framework/diag"
"github.com/hashicorp/terraform-plugin-framework/resource/identityschema"
"github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
postgresflexalpha "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/user/resources_gen"
postgresflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/utils"
postgresflexalphaWait "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/wait/postgresflexalpha"
@ -55,12 +55,12 @@ type UserResourceIdentityModel struct {
ProjectID types.String `tfsdk:"project_id"`
Region types.String `tfsdk:"region"`
InstanceID types.String `tfsdk:"instance_id"`
UserID types.Int64 `tfsdk:"user_id"`
UserID types.Int32 `tfsdk:"user_id"`
}
// userResource implements the resource handling for a PostgreSQL Flex user.
type userResource struct {
client *postgresflex.APIClient
client *v3alpha1api.APIClient
providerData core.ProviderData
}
@ -202,14 +202,14 @@ func (r *userResource) Create(
}
// Generate API request body from model
payload, err := toCreatePayload(&model, &roles)
payload, err := toCreatePayload(&model, roles)
if err != nil {
core.LogAndAddError(ctx, &resp.Diagnostics, "Error creating user", fmt.Sprintf("Creating API payload: %v", err))
return
}
// Create new user
userResp, err := r.client.CreateUserRequest(
userResp, err := r.client.DefaultAPI.CreateUserRequest(
ctx,
arg.projectId,
arg.region,
@ -221,7 +221,7 @@ func (r *userResource) Create(
}
id, ok := userResp.GetIdOk()
if !ok || id == 0 {
if !ok || *id == 0 {
core.LogAndAddError(
ctx,
&resp.Diagnostics,
@ -230,7 +230,7 @@ func (r *userResource) Create(
)
return
}
arg.userId = id
arg.userId = *id
ctx = tflog.SetField(ctx, "user_id", id)
@ -241,25 +241,25 @@ func (r *userResource) Create(
ProjectID: types.StringValue(arg.projectId),
Region: types.StringValue(arg.region),
InstanceID: types.StringValue(arg.instanceId),
UserID: types.Int64Value(id),
UserID: types.Int32Value(*id),
}
resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
if resp.Diagnostics.HasError() {
return
}
model.Id = types.Int64Value(id)
model.UserId = types.Int64Value(id)
model.Id = types.Int32Value(*id)
model.UserId = types.Int32Value(*id)
model.Password = types.StringValue(userResp.GetPassword())
model.Status = types.StringValue(userResp.GetStatus())
waitResp, err := postgresflexalphaWait.GetUserByIdWaitHandler(
ctx,
r.client,
r.client.DefaultAPI,
arg.projectId,
arg.instanceId,
arg.region,
id,
*id,
).SetSleepBeforeWait(
10 * time.Second,
).SetTimeout(
@ -276,7 +276,7 @@ func (r *userResource) Create(
return
}
if waitResp.Id == nil {
if waitResp.Id == 0 {
core.LogAndAddError(
ctx,
&resp.Diagnostics,
@ -285,7 +285,7 @@ func (r *userResource) Create(
)
return
}
if waitResp.Id == nil || *waitResp.Id != id {
if waitResp.Id != *id {
core.LogAndAddError(
ctx,
&resp.Diagnostics,
@ -336,11 +336,11 @@ func (r *userResource) Read(
// Read resource state
waitResp, err := postgresflexalphaWait.GetUserByIdWaitHandler(
ctx,
r.client,
r.client.DefaultAPI,
arg.projectId,
arg.instanceId,
arg.region,
model.UserId.ValueInt64(),
model.UserId.ValueInt32(),
).SetSleepBeforeWait(
10 * time.Second,
).SetTimeout(
@ -357,7 +357,7 @@ func (r *userResource) Read(
return
}
if waitResp.Id == nil || *waitResp.Id != model.UserId.ValueInt64() {
if waitResp.Id != model.UserId.ValueInt32() {
core.LogAndAddError(
ctx,
&resp.Diagnostics,
@ -366,7 +366,7 @@ func (r *userResource) Read(
)
return
}
arg.userId = *waitResp.Id
arg.userId = waitResp.Id
ctx = core.LogResponse(ctx)
@ -375,7 +375,7 @@ func (r *userResource) Read(
ProjectID: types.StringValue(arg.projectId),
Region: types.StringValue(arg.region),
InstanceID: types.StringValue(arg.instanceId),
UserID: types.Int64Value(arg.userId),
UserID: types.Int32Value(arg.userId),
}
resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
if resp.Diagnostics.HasError() {
@ -429,7 +429,7 @@ func (r *userResource) Update(
}
// Generate API request body from model
payload, err := toUpdatePayload(&model, &roles)
payload, err := toUpdatePayload(&model, roles)
if err != nil {
core.LogAndAddError(ctx, &resp.Diagnostics, "Error updating user", fmt.Sprintf("Updating API payload: %v", err))
return
@ -443,7 +443,7 @@ func (r *userResource) Update(
userId := int32(userId64) // nolint:gosec // check is performed above
// Update existing instance
err = r.client.UpdateUserRequest(
err = r.client.DefaultAPI.UpdateUserRequest(
ctx,
arg.projectId,
arg.region,
@ -462,7 +462,7 @@ func (r *userResource) Update(
ProjectID: types.StringValue(arg.projectId),
Region: types.StringValue(arg.region),
InstanceID: types.StringValue(arg.instanceId),
UserID: types.Int64Value(userId64),
UserID: types.Int32Value(userId64),
}
resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
if resp.Diagnostics.HasError() {
@ -472,11 +472,11 @@ func (r *userResource) Update(
// Verify update
waitResp, err := postgresflexalphaWait.GetUserByIdWaitHandler(
ctx,
r.client,
r.client.DefaultAPI,
arg.projectId,
arg.instanceId,
arg.region,
model.UserId.ValueInt64(),
model.UserId.ValueInt32(),
).SetSleepBeforeWait(
10 * time.Second,
).SetTimeout(
@ -493,7 +493,7 @@ func (r *userResource) Update(
return
}
if waitResp.Id == nil || *waitResp.Id != model.UserId.ValueInt64() {
if waitResp.Id != model.UserId.ValueInt32() {
core.LogAndAddError(
ctx,
&resp.Diagnostics,
@ -502,7 +502,7 @@ func (r *userResource) Update(
)
return
}
arg.userId = *waitResp.Id
arg.userId = waitResp.Id
// Set state to fully populated data
diags = resp.State.Set(ctx, stateModel)
@ -555,7 +555,7 @@ func (r *userResource) Delete(
userId := int32(userId64) // nolint:gosec // check is performed above
// Delete existing record set
err := r.client.DeleteUserRequest(ctx, arg.projectId, arg.region, arg.instanceId, userId).Execute()
err := r.client.DefaultAPI.DeleteUserRequest(ctx, arg.projectId, arg.region, arg.instanceId, userId).Execute()
if err != nil {
core.LogAndAddError(ctx, &resp.Diagnostics, "Error deleting user", fmt.Sprintf("Calling API: %v", err))
}
@ -571,7 +571,7 @@ func (r *userResource) Delete(
// if exists {
// core.LogAndAddError(
// ctx, &resp.Diagnostics, "Error deleting user",
// fmt.Sprintf("User ID '%v' resource still exists after deletion", model.UserId.ValueInt64()),
// fmt.Sprintf("User ID '%v' resource still exists after deletion", model.UserId.ValueInt32()),
// )
// return
//}
@ -598,7 +598,7 @@ func (r *userResource) IdentitySchema(
"instance_id": identityschema.StringAttribute{
RequiredForImport: true,
},
"user_id": identityschema.Int64Attribute{
"user_id": identityschema.Int32Attribute{
RequiredForImport: true,
},
},
@ -610,7 +610,7 @@ type clientArg struct {
projectId string
instanceId string
region string
userId int64
userId int32
}
// ImportState imports a resource into the Terraform state on success.
@ -668,7 +668,7 @@ func (r *userResource) ImportState(
projectId := identityData.ProjectID.ValueString()
region := identityData.Region.ValueString()
instanceId := identityData.InstanceID.ValueString()
userId := identityData.UserID.ValueInt64()
userId := identityData.UserID.ValueInt32()
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), projectId)...)
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), region)...)
@ -684,15 +684,15 @@ func (r *userResource) extractIdentityData(
identity UserResourceIdentityModel,
) (*clientArg, error) {
var projectId, region, instanceId string
var userId int64
var userId int32
if !model.UserId.IsNull() && !model.UserId.IsUnknown() {
userId = model.UserId.ValueInt64()
userId = model.UserId.ValueInt32()
} else {
if identity.UserID.IsNull() || identity.UserID.IsUnknown() {
return nil, fmt.Errorf("user_id not found in config")
}
userId = identity.UserID.ValueInt64()
userId = identity.UserID.ValueInt32()
}
if !model.ProjectId.IsNull() && !model.ProjectId.IsUnknown() {

View file

@ -14,7 +14,7 @@ import (
func UserResourceSchema(ctx context.Context) schema.Schema {
return schema.Schema{
Attributes: map[string]schema.Attribute{
"id": schema.Int64Attribute{
"id": schema.Int32Attribute{
Computed: true,
Description: "The ID of the user.",
MarkdownDescription: "The ID of the user.",
@ -64,7 +64,7 @@ func UserResourceSchema(ctx context.Context) schema.Schema {
Description: "The current status of the user.",
MarkdownDescription: "The current status of the user.",
},
"user_id": schema.Int64Attribute{
"user_id": schema.Int32Attribute{
Optional: true,
Computed: true,
Description: "The ID of the user.",
@ -75,7 +75,7 @@ func UserResourceSchema(ctx context.Context) schema.Schema {
}
type UserModel struct {
Id types.Int64 `tfsdk:"id"`
Id types.Int32 `tfsdk:"id"`
InstanceId types.String `tfsdk:"instance_id"`
Name types.String `tfsdk:"name"`
Password types.String `tfsdk:"password"`
@ -83,5 +83,5 @@ type UserModel struct {
Region types.String `tfsdk:"region"`
Roles types.List `tfsdk:"roles"`
Status types.String `tfsdk:"status"`
UserId types.Int64 `tfsdk:"user_id"`
UserId types.Int32 `tfsdk:"user_id"`
}

View file

@ -9,7 +9,7 @@ import (
"github.com/hashicorp/terraform-plugin-framework/diag"
"github.com/stackitcloud/stackit-sdk-go/core/config"
postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
postgresflex "github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"

View file

@ -15,7 +15,7 @@ import (
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
"github.com/stackitcloud/stackit-sdk-go/services/postgresflex"
)
const (