chore: work save
Some checks failed
CI Workflow / Check GoReleaser config (pull_request) Successful in 4s
CI Workflow / Test readiness for publishing provider (pull_request) Failing after 4m1s
CI Workflow / CI run build and linting (pull_request) Failing after 5m1s
CI Workflow / CI run tests (pull_request) Failing after 5m16s
CI Workflow / Code coverage report (pull_request) Has been skipped

This commit is contained in:
Marcel S. Henselin 2026-03-06 16:06:32 +01:00
parent d6d3a795bb
commit 7f5802aff0
27 changed files with 802 additions and 962 deletions

View file

@ -156,7 +156,7 @@ func (r *databaseDataSource) getDatabaseByNameOrID(
}
if isIdSet {
databaseId := model.DatabaseId.ValueInt32()
databaseId := model.DatabaseId.ValueInt64()
ctx = tflog.SetField(ctx, "database_id", databaseId)
return getDatabaseById(ctx, r.client.DefaultAPI, projectId, region, instanceId, databaseId)
}

View file

@ -14,12 +14,12 @@ import (
func DatabaseDataSourceSchema(ctx context.Context) schema.Schema {
return schema.Schema{
Attributes: map[string]schema.Attribute{
"database_id": schema.Int32Attribute{
"database_id": schema.Int64Attribute{
Required: true,
Description: "The ID of the database.",
MarkdownDescription: "The ID of the database.",
},
"tf_original_api_id": schema.Int32Attribute{
"tf_original_api_id": schema.Int64Attribute{
Computed: true,
Description: "The id of the database.",
MarkdownDescription: "The id of the database.",
@ -59,8 +59,8 @@ func DatabaseDataSourceSchema(ctx context.Context) schema.Schema {
}
type DatabaseModel struct {
DatabaseId types.Int32 `tfsdk:"database_id"`
Id types.Int32 `tfsdk:"tf_original_api_id"`
DatabaseId types.Int64 `tfsdk:"database_id"`
Id types.Int64 `tfsdk:"tf_original_api_id"`
InstanceId types.String `tfsdk:"instance_id"`
Name types.String `tfsdk:"name"`
Owner types.String `tfsdk:"owner"`

View file

@ -23,7 +23,7 @@ func DatabasesDataSourceSchema(ctx context.Context) schema.Schema {
"databases": schema.ListNestedAttribute{
NestedObject: schema.NestedAttributeObject{
Attributes: map[string]schema.Attribute{
"id": schema.Int32Attribute{
"id": schema.Int64Attribute{
Computed: true,
Description: "The id of the database.",
MarkdownDescription: "The id of the database.",
@ -54,7 +54,7 @@ func DatabasesDataSourceSchema(ctx context.Context) schema.Schema {
Description: "The ID of the instance.",
MarkdownDescription: "The ID of the instance.",
},
"page": schema.Int32Attribute{
"page": schema.Int64Attribute{
Optional: true,
Computed: true,
Description: "Number of the page of items list to be returned.",
@ -62,19 +62,19 @@ func DatabasesDataSourceSchema(ctx context.Context) schema.Schema {
},
"pagination": schema.SingleNestedAttribute{
Attributes: map[string]schema.Attribute{
"page": schema.Int32Attribute{
"page": schema.Int64Attribute{
Computed: true,
},
"size": schema.Int32Attribute{
"size": schema.Int64Attribute{
Computed: true,
},
"sort": schema.StringAttribute{
Computed: true,
},
"total_pages": schema.Int32Attribute{
"total_pages": schema.Int64Attribute{
Computed: true,
},
"total_rows": schema.Int32Attribute{
"total_rows": schema.Int64Attribute{
Computed: true,
},
},
@ -100,7 +100,7 @@ func DatabasesDataSourceSchema(ctx context.Context) schema.Schema {
),
},
},
"size": schema.Int32Attribute{
"size": schema.Int64Attribute{
Optional: true,
Computed: true,
Description: "Number of items to be returned on each page.",
@ -131,11 +131,11 @@ func DatabasesDataSourceSchema(ctx context.Context) schema.Schema {
type DatabasesModel struct {
Databases types.List `tfsdk:"databases"`
InstanceId types.String `tfsdk:"instance_id"`
Page types.Int32 `tfsdk:"page"`
Page types.Int64 `tfsdk:"page"`
Pagination PaginationValue `tfsdk:"pagination"`
ProjectId types.String `tfsdk:"project_id"`
Region types.String `tfsdk:"region"`
Size types.Int32 `tfsdk:"size"`
Size types.Int64 `tfsdk:"size"`
Sort types.String `tfsdk:"sort"`
}
@ -174,12 +174,12 @@ func (t DatabasesType) ValueFromObject(ctx context.Context, in basetypes.ObjectV
return nil, diags
}
idVal, ok := idAttribute.(basetypes.Int32Value)
idVal, ok := idAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`id expected to be basetypes.Int32Value, was: %T`, idAttribute))
fmt.Sprintf(`id expected to be basetypes.Int64Value, was: %T`, idAttribute))
}
nameAttribute, ok := attributes["name"]
@ -303,12 +303,12 @@ func NewDatabasesValue(attributeTypes map[string]attr.Type, attributes map[strin
return NewDatabasesValueUnknown(), diags
}
idVal, ok := idAttribute.(basetypes.Int32Value)
idVal, ok := idAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`id expected to be basetypes.Int32Value, was: %T`, idAttribute))
fmt.Sprintf(`id expected to be basetypes.Int64Value, was: %T`, idAttribute))
}
nameAttribute, ok := attributes["name"]
@ -427,7 +427,7 @@ func (t DatabasesType) ValueType(ctx context.Context) attr.Value {
var _ basetypes.ObjectValuable = DatabasesValue{}
type DatabasesValue struct {
Id basetypes.Int32Value `tfsdk:"id"`
Id basetypes.Int64Value `tfsdk:"id"`
Name basetypes.StringValue `tfsdk:"name"`
Owner basetypes.StringValue `tfsdk:"owner"`
state attr.ValueState
@ -439,7 +439,7 @@ func (v DatabasesValue) ToTerraformValue(ctx context.Context) (tftypes.Value, er
var val tftypes.Value
var err error
attrTypes["id"] = basetypes.Int32Type{}.TerraformType(ctx)
attrTypes["id"] = basetypes.Int64Type{}.TerraformType(ctx)
attrTypes["name"] = basetypes.StringType{}.TerraformType(ctx)
attrTypes["owner"] = basetypes.StringType{}.TerraformType(ctx)
@ -503,7 +503,7 @@ func (v DatabasesValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValu
var diags diag.Diagnostics
attributeTypes := map[string]attr.Type{
"id": basetypes.Int32Type{},
"id": basetypes.Int64Type{},
"name": basetypes.StringType{},
"owner": basetypes.StringType{},
}
@ -567,7 +567,7 @@ func (v DatabasesValue) Type(ctx context.Context) attr.Type {
func (v DatabasesValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
return map[string]attr.Type{
"id": basetypes.Int32Type{},
"id": basetypes.Int64Type{},
"name": basetypes.StringType{},
"owner": basetypes.StringType{},
}
@ -608,12 +608,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
return nil, diags
}
pageVal, ok := pageAttribute.(basetypes.Int32Value)
pageVal, ok := pageAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`page expected to be basetypes.Int32Value, was: %T`, pageAttribute))
fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute))
}
sizeAttribute, ok := attributes["size"]
@ -626,12 +626,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
return nil, diags
}
sizeVal, ok := sizeAttribute.(basetypes.Int32Value)
sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`size expected to be basetypes.Int32Value, was: %T`, sizeAttribute))
fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
}
sortAttribute, ok := attributes["sort"]
@ -662,12 +662,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
return nil, diags
}
totalPagesVal, ok := totalPagesAttribute.(basetypes.Int32Value)
totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`total_pages expected to be basetypes.Int32Value, was: %T`, totalPagesAttribute))
fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute))
}
totalRowsAttribute, ok := attributes["total_rows"]
@ -680,12 +680,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
return nil, diags
}
totalRowsVal, ok := totalRowsAttribute.(basetypes.Int32Value)
totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`total_rows expected to be basetypes.Int32Value, was: %T`, totalRowsAttribute))
fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute))
}
if diags.HasError() {
@ -775,12 +775,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
return NewPaginationValueUnknown(), diags
}
pageVal, ok := pageAttribute.(basetypes.Int32Value)
pageVal, ok := pageAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`page expected to be basetypes.Int32Value, was: %T`, pageAttribute))
fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute))
}
sizeAttribute, ok := attributes["size"]
@ -793,12 +793,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
return NewPaginationValueUnknown(), diags
}
sizeVal, ok := sizeAttribute.(basetypes.Int32Value)
sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`size expected to be basetypes.Int32Value, was: %T`, sizeAttribute))
fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
}
sortAttribute, ok := attributes["sort"]
@ -829,12 +829,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
return NewPaginationValueUnknown(), diags
}
totalPagesVal, ok := totalPagesAttribute.(basetypes.Int32Value)
totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`total_pages expected to be basetypes.Int32Value, was: %T`, totalPagesAttribute))
fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute))
}
totalRowsAttribute, ok := attributes["total_rows"]
@ -847,12 +847,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
return NewPaginationValueUnknown(), diags
}
totalRowsVal, ok := totalRowsAttribute.(basetypes.Int32Value)
totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`total_rows expected to be basetypes.Int32Value, was: %T`, totalRowsAttribute))
fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute))
}
if diags.HasError() {
@ -937,11 +937,11 @@ func (t PaginationType) ValueType(ctx context.Context) attr.Value {
var _ basetypes.ObjectValuable = PaginationValue{}
type PaginationValue struct {
Page basetypes.Int32Value `tfsdk:"page"`
Size basetypes.Int32Value `tfsdk:"size"`
Page basetypes.Int64Value `tfsdk:"page"`
Size basetypes.Int64Value `tfsdk:"size"`
Sort basetypes.StringValue `tfsdk:"sort"`
TotalPages basetypes.Int32Value `tfsdk:"total_pages"`
TotalRows basetypes.Int32Value `tfsdk:"total_rows"`
TotalPages basetypes.Int64Value `tfsdk:"total_pages"`
TotalRows basetypes.Int64Value `tfsdk:"total_rows"`
state attr.ValueState
}
@ -951,11 +951,11 @@ func (v PaginationValue) ToTerraformValue(ctx context.Context) (tftypes.Value, e
var val tftypes.Value
var err error
attrTypes["page"] = basetypes.Int32Type{}.TerraformType(ctx)
attrTypes["size"] = basetypes.Int32Type{}.TerraformType(ctx)
attrTypes["page"] = basetypes.Int64Type{}.TerraformType(ctx)
attrTypes["size"] = basetypes.Int64Type{}.TerraformType(ctx)
attrTypes["sort"] = basetypes.StringType{}.TerraformType(ctx)
attrTypes["total_pages"] = basetypes.Int32Type{}.TerraformType(ctx)
attrTypes["total_rows"] = basetypes.Int32Type{}.TerraformType(ctx)
attrTypes["total_pages"] = basetypes.Int64Type{}.TerraformType(ctx)
attrTypes["total_rows"] = basetypes.Int64Type{}.TerraformType(ctx)
objectType := tftypes.Object{AttributeTypes: attrTypes}
@ -1033,11 +1033,11 @@ func (v PaginationValue) ToObjectValue(ctx context.Context) (basetypes.ObjectVal
var diags diag.Diagnostics
attributeTypes := map[string]attr.Type{
"page": basetypes.Int32Type{},
"size": basetypes.Int32Type{},
"page": basetypes.Int64Type{},
"size": basetypes.Int64Type{},
"sort": basetypes.StringType{},
"total_pages": basetypes.Int32Type{},
"total_rows": basetypes.Int32Type{},
"total_pages": basetypes.Int64Type{},
"total_rows": basetypes.Int64Type{},
}
if v.IsNull() {
@ -1109,10 +1109,10 @@ func (v PaginationValue) Type(ctx context.Context) attr.Type {
func (v PaginationValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
return map[string]attr.Type{
"page": basetypes.Int32Type{},
"size": basetypes.Int32Type{},
"page": basetypes.Int64Type{},
"size": basetypes.Int64Type{},
"sort": basetypes.StringType{},
"total_pages": basetypes.Int32Type{},
"total_rows": basetypes.Int32Type{},
"total_pages": basetypes.Int64Type{},
"total_rows": basetypes.Int64Type{},
}
}

View file

@ -23,10 +23,10 @@ func getDatabaseById(
ctx context.Context,
client databaseClientReader,
projectId, region, instanceId string,
databaseId int32,
databaseId int64,
) (*v3alpha1api.ListDatabase, error) {
filter := func(db v3alpha1api.ListDatabase) bool {
return db.Id == databaseId
return int64(db.Id) == databaseId
}
return getDatabase(ctx, client, projectId, region, instanceId, filter)
}

View file

@ -5,127 +5,99 @@ import (
"testing"
"github.com/google/go-cmp/cmp"
"github.com/stackitcloud/stackit-sdk-go/core/utils"
"github.com/stackitcloud/stackit-sdk-go/services/postgresflex"
"github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
)
type mockRequest struct {
executeFunc func() (*postgresflex.ListDatabasesResponse, error)
}
func (m *mockRequest) Page(_ int32) postgresflex.ApiListDatabasesRequestRequest { return m }
func (m *mockRequest) Size(_ int32) postgresflex.ApiListDatabasesRequestRequest { return m }
func (m *mockRequest) Sort(_ postgresflex.DatabaseSort) postgresflex.ApiListDatabasesRequestRequest {
return m
}
func (m *mockRequest) Execute() (*postgresflex.ListDatabasesResponse, error) {
return m.executeFunc()
}
type mockDBClient struct {
executeRequest func() postgresflex.ApiListDatabasesRequestRequest
}
var _ databaseClientReader = (*mockDBClient)(nil)
func (m *mockDBClient) ListDatabasesRequest(
_ context.Context,
_, _, _ string,
) postgresflex.ApiListDatabasesRequestRequest {
return m.executeRequest()
}
func TestGetDatabase(t *testing.T) {
mockResp := func(page int32) (*postgresflex.ListDatabasesResponse, error) {
mockResp := func(page int32) (*v3alpha1api.ListDatabasesResponse, error) {
if page == 1 {
return &postgresflex.ListDatabasesResponse{
Databases: &[]postgresflex.ListDatabase{
{Id: utils.Ptr(int32(1)), Name: utils.Ptr("first")},
{Id: utils.Ptr(int32(2)), Name: utils.Ptr("second")},
return &v3alpha1api.ListDatabasesResponse{
Databases: []v3alpha1api.ListDatabase{
{Id: int32(1), Name: "first"},
{Id: int32(2), Name: "second"},
},
Pagination: &postgresflex.Pagination{
Page: utils.Ptr(int32(1)),
TotalPages: utils.Ptr(int32(2)),
Size: utils.Ptr(int32(3)),
Pagination: v3alpha1api.Pagination{
Page: int32(1),
TotalPages: int32(2),
Size: int32(3),
},
}, nil
}
if page == 2 {
return &postgresflex.ListDatabasesResponse{
Databases: &[]postgresflex.ListDatabase{{Id: utils.Ptr(int32(3)), Name: utils.Ptr("three")}},
Pagination: &postgresflex.Pagination{
Page: utils.Ptr(int32(2)),
TotalPages: utils.Ptr(int32(2)),
Size: utils.Ptr(int32(3)),
return &v3alpha1api.ListDatabasesResponse{
Databases: []v3alpha1api.ListDatabase{{Id: int32(3), Name: "three"}},
Pagination: v3alpha1api.Pagination{
Page: int32(2),
TotalPages: int32(2),
Size: int32(3),
},
}, nil
}
return &postgresflex.ListDatabasesResponse{
Databases: &[]postgresflex.ListDatabase{},
Pagination: &postgresflex.Pagination{
Page: utils.Ptr(int32(3)),
TotalPages: utils.Ptr(int32(2)),
Size: utils.Ptr(int32(3)),
return &v3alpha1api.ListDatabasesResponse{
Databases: []v3alpha1api.ListDatabase{},
Pagination: v3alpha1api.Pagination{
Page: int32(3),
TotalPages: int32(2),
Size: int32(3),
},
}, nil
}
tests := []struct {
description string
projectId string
projectID string
region string
instanceId string
instanceID string
wantErr bool
wantDbName string
wantDbId int32
wantDbID int32
}{
{
description: "Success - Found by name on first page",
projectId: "pid", region: "reg", instanceId: "inst",
projectID: "pid", region: "reg", instanceID: "inst",
wantErr: false,
wantDbName: "second",
},
{
description: "Success - Found by id on first page",
projectId: "pid", region: "reg", instanceId: "inst",
projectID: "pid", region: "reg", instanceID: "inst",
wantErr: false,
wantDbId: 2,
wantDbID: 2,
},
{
description: "Success - Found by name on second page",
projectId: "pid", region: "reg", instanceId: "inst",
projectID: "pid", region: "reg", instanceID: "inst",
wantErr: false,
wantDbName: "three",
},
{
description: "Success - Found by id on second page",
projectId: "pid", region: "reg", instanceId: "inst",
projectID: "pid", region: "reg", instanceID: "inst",
wantErr: false,
wantDbId: 1,
wantDbID: 1,
},
{
description: "Error - API failure",
projectId: "pid", region: "reg", instanceId: "inst",
projectID: "pid", region: "reg", instanceID: "inst",
wantErr: true,
},
{
description: "Error - Missing parameters",
projectId: "", region: "reg", instanceId: "inst",
projectID: "", region: "reg", instanceID: "inst",
wantErr: true,
},
{
description: "Error - Search by name not found after all pages",
projectId: "pid", region: "reg", instanceId: "inst",
projectID: "pid", region: "reg", instanceID: "inst",
wantDbName: "non-existent",
wantErr: true,
},
{
description: "Error - Search by id not found after all pages",
projectId: "pid", region: "reg", instanceId: "inst",
wantDbId: 999999,
projectID: "pid", region: "reg", instanceID: "inst",
wantDbID: 999999,
wantErr: true,
},
}
@ -134,46 +106,45 @@ func TestGetDatabase(t *testing.T) {
t.Run(
tt.description, func(t *testing.T) {
var currentPage int32
client := &mockDBClient{
executeRequest: func() postgresflex.ApiListDatabasesRequestRequest {
return &mockRequest{
executeFunc: func() (*postgresflex.ListDatabasesResponse, error) {
currentPage++
return mockResp(currentPage)
},
}
},
mockCall := func(_ v3alpha1api.ApiListDatabasesRequestRequest) (*v3alpha1api.ListDatabasesResponse, error) {
currentPage++
return mockResp(currentPage)
}
var actual *postgresflex.ListDatabase
client := &v3alpha1api.DefaultAPIServiceMock{
ListDatabasesRequestExecuteMock: &mockCall,
}
var actual *v3alpha1api.ListDatabase
var errDB error
if tt.wantDbName != "" {
actual, errDB = getDatabaseByName(
t.Context(),
client,
tt.projectId,
tt.projectID,
tt.region,
tt.instanceId,
tt.instanceID,
tt.wantDbName,
)
} else if tt.wantDbId != 0 {
} else if tt.wantDbID != 0 {
actual, errDB = getDatabaseById(
t.Context(),
client,
tt.projectId,
tt.projectID,
tt.region,
tt.instanceId,
tt.wantDbId,
tt.instanceID,
int64(tt.wantDbID),
)
} else {
actual, errDB = getDatabase(
context.Background(),
client,
tt.projectId,
tt.projectID,
tt.region,
tt.instanceId,
func(_ postgresflex.ListDatabase) bool { return false },
tt.instanceID,
func(_ v3alpha1api.ListDatabase) bool { return false },
)
}
@ -182,14 +153,14 @@ func TestGetDatabase(t *testing.T) {
return
}
if !tt.wantErr && tt.wantDbName != "" && actual != nil {
if *actual.Name != tt.wantDbName {
t.Errorf("getDatabaseByNameOrID() got name = %v, want %v", *actual.Name, tt.wantDbName)
if actual.Name != tt.wantDbName {
t.Errorf("getDatabaseByNameOrID() got name = %v, want %v", actual.Name, tt.wantDbName)
}
}
if !tt.wantErr && tt.wantDbId != 0 && actual != nil {
if *actual.Id != tt.wantDbId {
t.Errorf("getDatabaseByNameOrID() got id = %v, want %v", *actual.Id, tt.wantDbId)
if !tt.wantErr && tt.wantDbID != 0 && actual != nil {
if actual.Id != tt.wantDbID {
t.Errorf("getDatabaseByNameOrID() got id = %v, want %v", actual.Id, tt.wantDbID)
}
}
},
@ -200,23 +171,18 @@ func TestGetDatabase(t *testing.T) {
func TestCleanString(t *testing.T) {
testcases := []struct {
name string
given *string
expected *string
given string
expected string
}{
{
name: "should remove quotes",
given: utils.Ptr("\"quoted\""),
expected: utils.Ptr("quoted"),
},
{
name: "should handle nil",
given: nil,
expected: nil,
given: "\"quoted\"",
expected: "quoted",
},
{
name: "should not change unquoted string",
given: utils.Ptr("unquoted"),
expected: utils.Ptr("unquoted"),
given: "unquoted",
expected: "unquoted",
},
}

View file

@ -2,6 +2,7 @@ package postgresflexalpha
import (
"fmt"
"strconv"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
@ -25,17 +26,17 @@ func mapFields(
return fmt.Errorf("model given is nil")
}
var databaseId int32
if model.DatabaseId.ValueInt32() != 0 {
databaseId = model.DatabaseId.ValueInt32()
var databaseId int64
if model.DatabaseId.ValueInt64() != 0 {
databaseId = model.DatabaseId.ValueInt64()
} else if source.Id != 0 {
databaseId = source.Id
databaseId = int64(source.Id)
} else {
return fmt.Errorf("database id not present")
}
model.Id = types.Int32Value(databaseId)
model.DatabaseId = types.Int32Value(databaseId)
model.Id = types.Int64Value(databaseId)
model.DatabaseId = types.Int64Value(databaseId)
model.Name = types.StringValue(source.GetName())
model.Owner = types.StringValue(cleanString(source.Owner))
model.Region = types.StringValue(region)
@ -45,7 +46,7 @@ func mapFields(
model.ProjectId.ValueString(),
region,
model.InstanceId.ValueString(),
string(databaseId),
strconv.FormatInt(databaseId, 10),
)
return nil
@ -63,17 +64,17 @@ func mapResourceFields(source *v3alpha1api.GetDatabaseResponse, model *resourceM
return fmt.Errorf("model input is nil")
}
var databaseId int32
if model.Id.ValueInt32() != 0 {
databaseId = model.Id.ValueInt32()
var databaseId int64
if model.Id.ValueInt64() != 0 {
databaseId = model.Id.ValueInt64()
} else if source.Id != 0 {
databaseId = source.Id
databaseId = int64(source.Id)
} else {
return fmt.Errorf("database id not present")
}
model.Id = types.Int32Value(databaseId)
model.DatabaseId = types.Int32Value(databaseId)
model.Id = types.Int64Value(databaseId)
model.DatabaseId = types.Int64Value(databaseId)
model.Name = types.StringValue(source.GetName())
model.Owner = types.StringValue(cleanString(source.Owner))
return nil

View file

@ -7,7 +7,7 @@ import (
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/stackitcloud/stackit-sdk-go/core/utils"
postgresflexalpha "github.com/stackitcloud/stackit-sdk-go/services/postgresflex"
postgresflexalpha "github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
datasource "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/database/datasources_gen"
)
@ -31,9 +31,9 @@ func TestMapFields(t *testing.T) {
name: "should map fields correctly",
given: given{
source: &postgresflexalpha.ListDatabase{
Id: utils.Ptr(int32(1)),
Name: utils.Ptr("my-db"),
Owner: utils.Ptr("\"my-owner\""),
Id: int32(1),
Name: "my-db",
Owner: "my-owner",
},
model: &dataSourceModel{
DatabaseModel: datasource.DatabaseModel{
@ -46,11 +46,11 @@ func TestMapFields(t *testing.T) {
expected: expected{
model: &dataSourceModel{
DatabaseModel: datasource.DatabaseModel{
Id: types.Int32Value(1),
Id: types.Int64Value(1),
Name: types.StringValue("my-db"),
Owner: types.StringValue("my-owner"),
Region: types.StringValue("eu01"),
DatabaseId: types.Int32Value(1),
DatabaseId: types.Int64Value(1),
InstanceId: types.StringValue("my-instance"),
ProjectId: types.StringValue("my-project"),
},
@ -62,12 +62,12 @@ func TestMapFields(t *testing.T) {
name: "should preserve existing model ID",
given: given{
source: &postgresflexalpha.ListDatabase{
Id: utils.Ptr(int32(1)),
Name: utils.Ptr("my-db"),
Id: int32(1),
Name: "my-db",
},
model: &dataSourceModel{
DatabaseModel: datasource.DatabaseModel{
Id: types.Int32Value(1),
Id: types.Int64Value(1),
ProjectId: types.StringValue("my-project"),
InstanceId: types.StringValue("my-instance"),
},
@ -77,9 +77,10 @@ func TestMapFields(t *testing.T) {
expected: expected{
model: &dataSourceModel{
DatabaseModel: datasource.DatabaseModel{
Id: types.Int32Value(1),
Name: types.StringValue("my-db"),
Owner: types.StringNull(), DatabaseId: types.Int32Value(1),
Id: types.Int64Value(1),
Name: types.StringValue("my-db"),
Owner: types.StringValue(""),
DatabaseId: types.Int64Value(1),
Region: types.StringValue("eu01"),
InstanceId: types.StringValue("my-instance"),
ProjectId: types.StringValue("my-project"),
@ -99,7 +100,7 @@ func TestMapFields(t *testing.T) {
{
name: "should fail on nil source ID",
given: given{
source: &postgresflexalpha.ListDatabase{Id: nil},
source: &postgresflexalpha.ListDatabase{Id: 0},
model: &dataSourceModel{},
},
expected: expected{err: true},
@ -107,7 +108,7 @@ func TestMapFields(t *testing.T) {
{
name: "should fail on nil model",
given: given{
source: &postgresflexalpha.ListDatabase{Id: utils.Ptr(int32(1))},
source: &postgresflexalpha.ListDatabase{Id: int32(1)},
model: nil,
},
expected: expected{err: true},
@ -150,18 +151,18 @@ func TestMapResourceFields(t *testing.T) {
name: "should map fields correctly",
given: given{
source: &postgresflexalpha.GetDatabaseResponse{
Id: utils.Ptr(int32(1)),
Name: utils.Ptr("my-db"),
Owner: utils.Ptr("my-owner"),
Id: int32(1),
Name: "my-db",
Owner: "my-owner",
},
model: &resourceModel{},
},
expected: expected{
model: &resourceModel{
Id: types.Int32Value(1),
Id: types.Int64Value(1),
Name: types.StringValue("my-db"),
Owner: types.StringValue("my-owner"),
DatabaseId: types.Int32Value(1),
DatabaseId: types.Int64Value(1),
},
},
},
@ -216,7 +217,7 @@ func TestToCreatePayload(t *testing.T) {
},
expected: expected{
payload: &postgresflexalpha.CreateDatabaseRequestPayload{
Name: utils.Ptr("my-db"),
Name: "my-db",
Owner: utils.Ptr("my-owner"),
},
},

View file

@ -50,7 +50,7 @@ type DatabaseResourceIdentityModel struct {
ProjectID types.String `tfsdk:"project_id"`
Region types.String `tfsdk:"region"`
InstanceID types.String `tfsdk:"instance_id"`
DatabaseID types.Int32 `tfsdk:"database_id"`
DatabaseID types.Int64 `tfsdk:"database_id"`
}
// databaseResource is the resource implementation.
@ -155,7 +155,7 @@ func (r *databaseResource) IdentitySchema(
"instance_id": identityschema.StringAttribute{
RequiredForImport: true,
},
"database_id": identityschema.Int32Attribute{
"database_id": identityschema.Int64Attribute{
RequiredForImport: true,
},
},
@ -219,7 +219,7 @@ func (r *databaseResource) Create(
)
return
}
databaseId := *dbID
databaseId := int64(*dbID)
ctx = tflog.SetField(ctx, "database_id", databaseId)
ctx = core.LogResponse(ctx)
@ -228,7 +228,7 @@ func (r *databaseResource) Create(
ProjectID: types.StringValue(projectId),
Region: types.StringValue(region),
InstanceID: types.StringValue(instanceId),
DatabaseID: types.Int32Value(int32(databaseId)),
DatabaseID: types.Int64Value(databaseId),
}
resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
if resp.Diagnostics.HasError() {
@ -287,7 +287,7 @@ func (r *databaseResource) Read(
projectId := model.ProjectId.ValueString()
instanceId := model.InstanceId.ValueString()
region := model.Region.ValueString()
databaseId := model.DatabaseId.ValueInt32()
databaseId := model.DatabaseId.ValueInt64()
ctx = tflog.SetField(ctx, "project_id", projectId)
ctx = tflog.SetField(ctx, "instance_id", instanceId)
@ -322,13 +322,12 @@ func (r *databaseResource) Read(
return
}
// TODO: use values from api to identify drift
// Save identity into Terraform state
identity := DatabaseResourceIdentityModel{
ProjectID: types.StringValue(projectId),
Region: types.StringValue(region),
InstanceID: types.StringValue(instanceId),
DatabaseID: types.Int32Value(databaseId),
DatabaseID: types.Int64Value(int64(databaseResp.GetId())),
}
resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
if resp.Diagnostics.HasError() {
@ -362,7 +361,7 @@ func (r *databaseResource) Update(
projectId := model.ProjectId.ValueString()
instanceId := model.InstanceId.ValueString()
region := model.Region.ValueString()
databaseId := model.DatabaseId.ValueInt32()
databaseId := model.DatabaseId.ValueInt64()
ctx = tflog.SetField(ctx, "project_id", projectId)
ctx = tflog.SetField(ctx, "instance_id", instanceId)
@ -394,13 +393,18 @@ func (r *databaseResource) Update(
return
}
if databaseId > math.MaxInt32 {
core.LogAndAddError(ctx, &resp.Diagnostics, "error updating database", "databaseID out of bounds for int32")
return
}
databaseId32 := int32(databaseId)
// Update existing database
err := r.client.DefaultAPI.UpdateDatabasePartiallyRequest(
ctx,
projectId,
region,
instanceId,
databaseId,
databaseId32,
).UpdateDatabasePartiallyRequestPayload(payload).Execute()
if err != nil {
core.LogAndAddError(ctx, &resp.Diagnostics, "error updating database", err.Error())
@ -437,7 +441,7 @@ func (r *databaseResource) Update(
ProjectID: types.StringValue(projectId),
Region: types.StringValue(region),
InstanceID: types.StringValue(instanceId),
DatabaseID: types.Int32Value(databaseId),
DatabaseID: types.Int64Value(databaseId),
}
resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
if resp.Diagnostics.HasError() {
@ -567,7 +571,7 @@ func (r *databaseResource) ImportState(
projectId := identityData.ProjectID.ValueString()
region := identityData.Region.ValueString()
instanceId := identityData.InstanceID.ValueString()
databaseId := identityData.DatabaseID.ValueInt32()
databaseId := identityData.DatabaseID.ValueInt64()
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), projectId)...)
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), region)...)
@ -581,14 +585,14 @@ func (r *databaseResource) ImportState(
func (r *databaseResource) extractIdentityData(
model resourceModel,
identity DatabaseResourceIdentityModel,
) (projectId, region, instanceId string, databaseId int32, err error) {
) (projectId, region, instanceId string, databaseId int64, err error) {
if !model.DatabaseId.IsNull() && !model.DatabaseId.IsUnknown() {
databaseId = model.DatabaseId.ValueInt32()
databaseId = model.DatabaseId.ValueInt64()
} else {
if identity.DatabaseID.IsNull() || identity.DatabaseID.IsUnknown() {
return "", "", "", 0, fmt.Errorf("database_id not found in config")
}
databaseId = identity.DatabaseID.ValueInt32()
databaseId = identity.DatabaseID.ValueInt64()
}
if !model.ProjectId.IsNull() && !model.ProjectId.IsUnknown() {

View file

@ -14,13 +14,13 @@ import (
func DatabaseResourceSchema(ctx context.Context) schema.Schema {
return schema.Schema{
Attributes: map[string]schema.Attribute{
"database_id": schema.Int32Attribute{
"database_id": schema.Int64Attribute{
Optional: true,
Computed: true,
Description: "The ID of the database.",
MarkdownDescription: "The ID of the database.",
},
"id": schema.Int32Attribute{
"id": schema.Int64Attribute{
Computed: true,
Description: "The id of the database.",
MarkdownDescription: "The id of the database.",
@ -64,8 +64,8 @@ func DatabaseResourceSchema(ctx context.Context) schema.Schema {
}
type DatabaseModel struct {
DatabaseId types.Int32 `tfsdk:"database_id"`
Id types.Int32 `tfsdk:"id"`
DatabaseId types.Int64 `tfsdk:"database_id"`
Id types.Int64 `tfsdk:"id"`
InstanceId types.String `tfsdk:"instance_id"`
Name types.String `tfsdk:"name"`
Owner types.String `tfsdk:"owner"`

View file

@ -1,133 +1,134 @@
package postgresFlexAlphaFlavor
import (
"context"
"testing"
postgresflex "github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
)
// mockRequest is a stub flavors-list request builder: the pagination
// and sort setters are fluent no-ops returning the receiver, and
// Execute delegates to the injected executeFunc.
type mockRequest struct {
	executeFunc func() (*postgresflex.GetFlavorsResponse, error)
}

// Page ignores its argument and returns the receiver (fluent no-op).
func (r *mockRequest) Page(_ int32) postgresflex.ApiGetFlavorsRequestRequest { return r }

// Size ignores its argument and returns the receiver (fluent no-op).
func (r *mockRequest) Size(_ int32) postgresflex.ApiGetFlavorsRequestRequest { return r }

// Sort ignores its argument and returns the receiver (fluent no-op).
func (r *mockRequest) Sort(_ postgresflex.FlavorSort) postgresflex.ApiGetFlavorsRequestRequest {
	return r
}

// Execute runs the injected response function.
func (r *mockRequest) Execute() (*postgresflex.GetFlavorsResponse, error) {
	return r.executeFunc()
}
// mockFlavorsClient fakes the flavors API client; GetFlavorsRequest
// returns whatever request builder executeRequest produces, ignoring
// the context and path parameters.
type mockFlavorsClient struct {
	executeRequest func() postgresflex.ApiGetFlavorsRequestRequest
}

// GetFlavorsRequest satisfies the client interface used by
// getFlavorsByFilter by delegating to executeRequest.
func (c *mockFlavorsClient) GetFlavorsRequest(_ context.Context, _, _ string) postgresflex.ApiGetFlavorsRequestRequest {
	return c.executeRequest()
}
// mockResp fakes a paginated flavors listing: page 1 carries two flavors,
// page 2 carries one, and any later page is empty — three flavors in total.
var mockResp = func(page int32) (*postgresflex.GetFlavorsResponse, error) {
	switch page {
	case 1:
		return &postgresflex.GetFlavorsResponse{
			Flavors: []postgresflex.ListFlavors{
				{Id: "flavor-1", Description: "first"},
				{Id: "flavor-2", Description: "second"},
			},
		}, nil
	case 2:
		return &postgresflex.GetFlavorsResponse{
			Flavors: []postgresflex.ListFlavors{
				{Id: "flavor-3", Description: "three"},
			},
		}, nil
	default:
		// Exhausted: an empty page signals the caller to stop paging.
		return &postgresflex.GetFlavorsResponse{
			Flavors: []postgresflex.ListFlavors{},
		}, nil
	}
}
// TestGetFlavorsByFilter verifies that filtering works across the paginated
// mock responses and that missing parameters surface as an error.
func TestGetFlavorsByFilter(t *testing.T) {
	tests := []struct {
		description string
		projectId   string
		region      string
		filter      func(postgresflex.ListFlavors) bool
		wantCount   int
		wantErr     bool
	}{
		{
			description: "Success - Get all flavors (2 pages)",
			projectId:   "pid", region: "reg",
			filter:    func(_ postgresflex.ListFlavors) bool { return true },
			wantCount: 3,
			wantErr:   false,
		},
		{
			description: "Success - Filter flavors by description",
			projectId:   "pid", region: "reg",
			filter:    func(f postgresflex.ListFlavors) bool { return f.Description == "first" },
			wantCount: 1,
			wantErr:   false,
		},
		{
			description: "Error - Missing parameters",
			projectId:   "", region: "reg",
			wantErr: true,
		},
	}

	for _, tt := range tests {
		t.Run(
			tt.description, func(t *testing.T) {
				var currentPage int32
				client := &mockFlavorsClient{
					executeRequest: func() postgresflex.ApiGetFlavorsRequestRequest {
						// Return a pointer: *mockRequest satisfies the request
						// interface for either receiver kind on mockRequest.
						return &mockRequest{
							executeFunc: func() (*postgresflex.GetFlavorsResponse, error) {
								currentPage++
								return mockResp(currentPage)
							},
						}
					},
				}
				actual, err := getFlavorsByFilter(context.Background(), client, tt.projectId, tt.region, tt.filter)

				if (err != nil) != tt.wantErr {
					t.Errorf("getFlavorsByFilter() error = %v, wantErr %v", err, tt.wantErr)
					return
				}

				if !tt.wantErr && len(actual) != tt.wantCount {
					t.Errorf("getFlavorsByFilter() got %d flavors, want %d", len(actual), tt.wantCount)
				}
			},
		)
	}
}
// TestGetAllFlavors checks that every page of the mocked listing is collected
// into a single slice (2 + 1 flavors across two pages).
func TestGetAllFlavors(t *testing.T) {
	var currentPage int32
	client := &mockFlavorsClient{
		executeRequest: func() postgresflex.ApiGetFlavorsRequestRequest {
			// Return a pointer: *mockRequest satisfies the request interface
			// for either receiver kind on mockRequest.
			return &mockRequest{
				executeFunc: func() (*postgresflex.GetFlavorsResponse, error) {
					currentPage++
					return mockResp(currentPage)
				},
			}
		},
	}

	res, err := getAllFlavors(context.Background(), client, "pid", "reg")
	if err != nil {
		t.Errorf("getAllFlavors() unexpected error: %v", err)
	}
	if len(res) != 3 {
		t.Errorf("getAllFlavors() expected 3 flavors, got %d", len(res))
	}
}
//
//import (
// "context"
// "testing"
//
// postgresflex "github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
//)
//
//type mockRequest struct {
// executeFunc func() (*postgresflex.GetFlavorsResponse, error)
//}
//
//func (m *mockRequest) Page(_ int32) postgresflex.ApiGetFlavorsRequestRequest { return m }
//func (m *mockRequest) Size(_ int32) postgresflex.ApiGetFlavorsRequestRequest { return m }
//func (m *mockRequest) Sort(_ postgresflex.FlavorSort) postgresflex.ApiGetFlavorsRequestRequest {
// return m
//}
//func (m *mockRequest) Execute() (*postgresflex.GetFlavorsResponse, error) {
// return m.executeFunc()
//}
//
//type mockFlavorsClient struct {
// executeRequest func() postgresflex.ApiGetFlavorsRequestRequest
//}
//
//func (m *mockFlavorsClient) GetFlavorsRequest(_ context.Context, _, _ string) postgresflex.ApiGetFlavorsRequestRequest {
// return m.executeRequest()
//}
//
//var mockResp = func(page int32) (*postgresflex.GetFlavorsResponse, error) {
// if page == 1 {
// return &postgresflex.GetFlavorsResponse{
// Flavors: []postgresflex.ListFlavors{
// {Id: "flavor-1", Description: "first"},
// {Id: "flavor-2", Description: "second"},
// },
// }, nil
// }
// if page == 2 {
// return &postgresflex.GetFlavorsResponse{
// Flavors: []postgresflex.ListFlavors{
// {Id: "flavor-3", Description: "three"},
// },
// }, nil
// }
//
// return &postgresflex.GetFlavorsResponse{
// Flavors: []postgresflex.ListFlavors{},
// }, nil
//}
//
//func TestGetFlavorsByFilter(t *testing.T) {
// tests := []struct {
// description string
// projectId string
// region string
// mockErr error
// filter func(postgresflex.ListFlavors) bool
// wantCount int
// wantErr bool
// }{
// {
// description: "Success - Get all flavors (2 pages)",
// projectId: "pid", region: "reg",
// filter: func(_ postgresflex.ListFlavors) bool { return true },
// wantCount: 3,
// wantErr: false,
// },
// {
// description: "Success - Filter flavors by description",
// projectId: "pid", region: "reg",
// filter: func(f postgresflex.ListFlavors) bool { return f.Description == "first" },
// wantCount: 1,
// wantErr: false,
// },
// {
// description: "Error - Missing parameters",
// projectId: "", region: "reg",
// wantErr: true,
// },
// }
//
// for _, tt := range tests {
// t.Run(
// tt.description, func(t *testing.T) {
// var currentPage int32
// client := &mockFlavorsClient{
// executeRequest: func() postgresflex.ApiGetFlavorsRequestRequest {
// return mockRequest{
// executeFunc: func() (*postgresflex.GetFlavorsResponse, error) {
// currentPage++
// return mockResp(currentPage)
// },
// }
// },
// }
// actual, err := getFlavorsByFilter(context.Background(), client, tt.projectId, tt.region, tt.filter)
//
// if (err != nil) != tt.wantErr {
// t.Errorf("getFlavorsByFilter() error = %v, wantErr %v", err, tt.wantErr)
// return
// }
//
// if !tt.wantErr && len(actual) != tt.wantCount {
// t.Errorf("getFlavorsByFilter() got %d flavors, want %d", len(actual), tt.wantCount)
// }
// },
// )
// }
//}
//
//func TestGetAllFlavors(t *testing.T) {
// var currentPage int32
// client := &mockFlavorsClient{
// executeRequest: func() postgresflex.ApiGetFlavorsRequestRequest {
// return mockRequest{
// executeFunc: func() (*postgresflex.GetFlavorsResponse, error) {
// currentPage++
// return mockResp(currentPage)
// },
// }
// },
// }
//
// res, err := getAllFlavors(context.Background(), client, "pid", "reg")
// if err != nil {
// t.Errorf("getAllFlavors() unexpected error: %v", err)
// }
// if len(res) != 3 {
// t.Errorf("getAllFlavors() expected 3 flavor, got %d", len(res))
// }
//}

View file

@ -23,7 +23,7 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
"flavors": schema.ListNestedAttribute{
NestedObject: schema.NestedAttributeObject{
Attributes: map[string]schema.Attribute{
"cpu": schema.Int32Attribute{
"cpu": schema.Int64Attribute{
Computed: true,
Description: "The cpu count of the instance.",
MarkdownDescription: "The cpu count of the instance.",
@ -38,17 +38,17 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
Description: "The id of the instance flavor.",
MarkdownDescription: "The id of the instance flavor.",
},
"max_gb": schema.Int32Attribute{
"max_gb": schema.Int64Attribute{
Computed: true,
Description: "maximum storage which can be ordered for the flavor in Gigabyte.",
MarkdownDescription: "maximum storage which can be ordered for the flavor in Gigabyte.",
},
"memory": schema.Int32Attribute{
"memory": schema.Int64Attribute{
Computed: true,
Description: "The memory of the instance in Gibibyte.",
MarkdownDescription: "The memory of the instance in Gibibyte.",
},
"min_gb": schema.Int32Attribute{
"min_gb": schema.Int64Attribute{
Computed: true,
Description: "minimum storage which is required to order in Gigabyte.",
MarkdownDescription: "minimum storage which is required to order in Gigabyte.",
@ -64,10 +64,10 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
"class": schema.StringAttribute{
Computed: true,
},
"max_io_per_sec": schema.Int32Attribute{
"max_io_per_sec": schema.Int64Attribute{
Computed: true,
},
"max_through_in_mb": schema.Int32Attribute{
"max_through_in_mb": schema.Int64Attribute{
Computed: true,
},
},
@ -92,7 +92,7 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
Description: "List of flavors available for the project.",
MarkdownDescription: "List of flavors available for the project.",
},
"page": schema.Int32Attribute{
"page": schema.Int64Attribute{
Optional: true,
Computed: true,
Description: "Number of the page of items list to be returned.",
@ -100,19 +100,19 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
},
"pagination": schema.SingleNestedAttribute{
Attributes: map[string]schema.Attribute{
"page": schema.Int32Attribute{
"page": schema.Int64Attribute{
Computed: true,
},
"size": schema.Int32Attribute{
"size": schema.Int64Attribute{
Computed: true,
},
"sort": schema.StringAttribute{
Computed: true,
},
"total_pages": schema.Int32Attribute{
"total_pages": schema.Int64Attribute{
Computed: true,
},
"total_rows": schema.Int32Attribute{
"total_rows": schema.Int64Attribute{
Computed: true,
},
},
@ -138,7 +138,7 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
),
},
},
"size": schema.Int32Attribute{
"size": schema.Int64Attribute{
Optional: true,
Computed: true,
Description: "Number of items to be returned on each page.",
@ -176,11 +176,11 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
type FlavorsModel struct {
Flavors types.List `tfsdk:"flavors"`
Page types.Int32 `tfsdk:"page"`
Page types.Int64 `tfsdk:"page"`
Pagination PaginationValue `tfsdk:"pagination"`
ProjectId types.String `tfsdk:"project_id"`
Region types.String `tfsdk:"region"`
Size types.Int32 `tfsdk:"size"`
Size types.Int64 `tfsdk:"size"`
Sort types.String `tfsdk:"sort"`
}
@ -219,12 +219,12 @@ func (t FlavorsType) ValueFromObject(ctx context.Context, in basetypes.ObjectVal
return nil, diags
}
cpuVal, ok := cpuAttribute.(basetypes.Int32Value)
cpuVal, ok := cpuAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`cpu expected to be basetypes.Int32Value, was: %T`, cpuAttribute))
fmt.Sprintf(`cpu expected to be basetypes.Int64Value, was: %T`, cpuAttribute))
}
descriptionAttribute, ok := attributes["description"]
@ -273,12 +273,12 @@ func (t FlavorsType) ValueFromObject(ctx context.Context, in basetypes.ObjectVal
return nil, diags
}
maxGbVal, ok := maxGbAttribute.(basetypes.Int32Value)
maxGbVal, ok := maxGbAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`max_gb expected to be basetypes.Int32Value, was: %T`, maxGbAttribute))
fmt.Sprintf(`max_gb expected to be basetypes.Int64Value, was: %T`, maxGbAttribute))
}
memoryAttribute, ok := attributes["memory"]
@ -291,12 +291,12 @@ func (t FlavorsType) ValueFromObject(ctx context.Context, in basetypes.ObjectVal
return nil, diags
}
memoryVal, ok := memoryAttribute.(basetypes.Int32Value)
memoryVal, ok := memoryAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`memory expected to be basetypes.Int32Value, was: %T`, memoryAttribute))
fmt.Sprintf(`memory expected to be basetypes.Int64Value, was: %T`, memoryAttribute))
}
minGbAttribute, ok := attributes["min_gb"]
@ -309,12 +309,12 @@ func (t FlavorsType) ValueFromObject(ctx context.Context, in basetypes.ObjectVal
return nil, diags
}
minGbVal, ok := minGbAttribute.(basetypes.Int32Value)
minGbVal, ok := minGbAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`min_gb expected to be basetypes.Int32Value, was: %T`, minGbAttribute))
fmt.Sprintf(`min_gb expected to be basetypes.Int64Value, was: %T`, minGbAttribute))
}
nodeTypeAttribute, ok := attributes["node_type"]
@ -443,12 +443,12 @@ func NewFlavorsValue(attributeTypes map[string]attr.Type, attributes map[string]
return NewFlavorsValueUnknown(), diags
}
cpuVal, ok := cpuAttribute.(basetypes.Int32Value)
cpuVal, ok := cpuAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`cpu expected to be basetypes.Int32Value, was: %T`, cpuAttribute))
fmt.Sprintf(`cpu expected to be basetypes.Int64Value, was: %T`, cpuAttribute))
}
descriptionAttribute, ok := attributes["description"]
@ -497,12 +497,12 @@ func NewFlavorsValue(attributeTypes map[string]attr.Type, attributes map[string]
return NewFlavorsValueUnknown(), diags
}
maxGbVal, ok := maxGbAttribute.(basetypes.Int32Value)
maxGbVal, ok := maxGbAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`max_gb expected to be basetypes.Int32Value, was: %T`, maxGbAttribute))
fmt.Sprintf(`max_gb expected to be basetypes.Int64Value, was: %T`, maxGbAttribute))
}
memoryAttribute, ok := attributes["memory"]
@ -515,12 +515,12 @@ func NewFlavorsValue(attributeTypes map[string]attr.Type, attributes map[string]
return NewFlavorsValueUnknown(), diags
}
memoryVal, ok := memoryAttribute.(basetypes.Int32Value)
memoryVal, ok := memoryAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`memory expected to be basetypes.Int32Value, was: %T`, memoryAttribute))
fmt.Sprintf(`memory expected to be basetypes.Int64Value, was: %T`, memoryAttribute))
}
minGbAttribute, ok := attributes["min_gb"]
@ -533,12 +533,12 @@ func NewFlavorsValue(attributeTypes map[string]attr.Type, attributes map[string]
return NewFlavorsValueUnknown(), diags
}
minGbVal, ok := minGbAttribute.(basetypes.Int32Value)
minGbVal, ok := minGbAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`min_gb expected to be basetypes.Int32Value, was: %T`, minGbAttribute))
fmt.Sprintf(`min_gb expected to be basetypes.Int64Value, was: %T`, minGbAttribute))
}
nodeTypeAttribute, ok := attributes["node_type"]
@ -662,12 +662,12 @@ func (t FlavorsType) ValueType(ctx context.Context) attr.Value {
var _ basetypes.ObjectValuable = FlavorsValue{}
type FlavorsValue struct {
Cpu basetypes.Int32Value `tfsdk:"cpu"`
Cpu basetypes.Int64Value `tfsdk:"cpu"`
Description basetypes.StringValue `tfsdk:"description"`
Id basetypes.StringValue `tfsdk:"id"`
MaxGb basetypes.Int32Value `tfsdk:"max_gb"`
Memory basetypes.Int32Value `tfsdk:"memory"`
MinGb basetypes.Int32Value `tfsdk:"min_gb"`
MaxGb basetypes.Int64Value `tfsdk:"max_gb"`
Memory basetypes.Int64Value `tfsdk:"memory"`
MinGb basetypes.Int64Value `tfsdk:"min_gb"`
NodeType basetypes.StringValue `tfsdk:"node_type"`
StorageClasses basetypes.ListValue `tfsdk:"storage_classes"`
state attr.ValueState
@ -679,12 +679,12 @@ func (v FlavorsValue) ToTerraformValue(ctx context.Context) (tftypes.Value, erro
var val tftypes.Value
var err error
attrTypes["cpu"] = basetypes.Int32Type{}.TerraformType(ctx)
attrTypes["cpu"] = basetypes.Int64Type{}.TerraformType(ctx)
attrTypes["description"] = basetypes.StringType{}.TerraformType(ctx)
attrTypes["id"] = basetypes.StringType{}.TerraformType(ctx)
attrTypes["max_gb"] = basetypes.Int32Type{}.TerraformType(ctx)
attrTypes["memory"] = basetypes.Int32Type{}.TerraformType(ctx)
attrTypes["min_gb"] = basetypes.Int32Type{}.TerraformType(ctx)
attrTypes["max_gb"] = basetypes.Int64Type{}.TerraformType(ctx)
attrTypes["memory"] = basetypes.Int64Type{}.TerraformType(ctx)
attrTypes["min_gb"] = basetypes.Int64Type{}.TerraformType(ctx)
attrTypes["node_type"] = basetypes.StringType{}.TerraformType(ctx)
attrTypes["storage_classes"] = basetypes.ListType{
ElemType: StorageClassesValue{}.Type(ctx),
@ -819,12 +819,12 @@ func (v FlavorsValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue,
}
attributeTypes := map[string]attr.Type{
"cpu": basetypes.Int32Type{},
"cpu": basetypes.Int64Type{},
"description": basetypes.StringType{},
"id": basetypes.StringType{},
"max_gb": basetypes.Int32Type{},
"memory": basetypes.Int32Type{},
"min_gb": basetypes.Int32Type{},
"max_gb": basetypes.Int64Type{},
"memory": basetypes.Int64Type{},
"min_gb": basetypes.Int64Type{},
"node_type": basetypes.StringType{},
"storage_classes": basetypes.ListType{
ElemType: StorageClassesValue{}.Type(ctx),
@ -915,12 +915,12 @@ func (v FlavorsValue) Type(ctx context.Context) attr.Type {
func (v FlavorsValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
return map[string]attr.Type{
"cpu": basetypes.Int32Type{},
"cpu": basetypes.Int64Type{},
"description": basetypes.StringType{},
"id": basetypes.StringType{},
"max_gb": basetypes.Int32Type{},
"memory": basetypes.Int32Type{},
"min_gb": basetypes.Int32Type{},
"max_gb": basetypes.Int64Type{},
"memory": basetypes.Int64Type{},
"min_gb": basetypes.Int64Type{},
"node_type": basetypes.StringType{},
"storage_classes": basetypes.ListType{
ElemType: StorageClassesValue{}.Type(ctx),
@ -981,12 +981,12 @@ func (t StorageClassesType) ValueFromObject(ctx context.Context, in basetypes.Ob
return nil, diags
}
maxIoPerSecVal, ok := maxIoPerSecAttribute.(basetypes.Int32Value)
maxIoPerSecVal, ok := maxIoPerSecAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`max_io_per_sec expected to be basetypes.Int32Value, was: %T`, maxIoPerSecAttribute))
fmt.Sprintf(`max_io_per_sec expected to be basetypes.Int64Value, was: %T`, maxIoPerSecAttribute))
}
maxThroughInMbAttribute, ok := attributes["max_through_in_mb"]
@ -999,12 +999,12 @@ func (t StorageClassesType) ValueFromObject(ctx context.Context, in basetypes.Ob
return nil, diags
}
maxThroughInMbVal, ok := maxThroughInMbAttribute.(basetypes.Int32Value)
maxThroughInMbVal, ok := maxThroughInMbAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`max_through_in_mb expected to be basetypes.Int32Value, was: %T`, maxThroughInMbAttribute))
fmt.Sprintf(`max_through_in_mb expected to be basetypes.Int64Value, was: %T`, maxThroughInMbAttribute))
}
if diags.HasError() {
@ -1110,12 +1110,12 @@ func NewStorageClassesValue(attributeTypes map[string]attr.Type, attributes map[
return NewStorageClassesValueUnknown(), diags
}
maxIoPerSecVal, ok := maxIoPerSecAttribute.(basetypes.Int32Value)
maxIoPerSecVal, ok := maxIoPerSecAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`max_io_per_sec expected to be basetypes.Int32Value, was: %T`, maxIoPerSecAttribute))
fmt.Sprintf(`max_io_per_sec expected to be basetypes.Int64Value, was: %T`, maxIoPerSecAttribute))
}
maxThroughInMbAttribute, ok := attributes["max_through_in_mb"]
@ -1128,12 +1128,12 @@ func NewStorageClassesValue(attributeTypes map[string]attr.Type, attributes map[
return NewStorageClassesValueUnknown(), diags
}
maxThroughInMbVal, ok := maxThroughInMbAttribute.(basetypes.Int32Value)
maxThroughInMbVal, ok := maxThroughInMbAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`max_through_in_mb expected to be basetypes.Int32Value, was: %T`, maxThroughInMbAttribute))
fmt.Sprintf(`max_through_in_mb expected to be basetypes.Int64Value, was: %T`, maxThroughInMbAttribute))
}
if diags.HasError() {
@ -1217,8 +1217,8 @@ var _ basetypes.ObjectValuable = StorageClassesValue{}
type StorageClassesValue struct {
Class basetypes.StringValue `tfsdk:"class"`
MaxIoPerSec basetypes.Int32Value `tfsdk:"max_io_per_sec"`
MaxThroughInMb basetypes.Int32Value `tfsdk:"max_through_in_mb"`
MaxIoPerSec basetypes.Int64Value `tfsdk:"max_io_per_sec"`
MaxThroughInMb basetypes.Int64Value `tfsdk:"max_through_in_mb"`
state attr.ValueState
}
@ -1229,8 +1229,8 @@ func (v StorageClassesValue) ToTerraformValue(ctx context.Context) (tftypes.Valu
var err error
attrTypes["class"] = basetypes.StringType{}.TerraformType(ctx)
attrTypes["max_io_per_sec"] = basetypes.Int32Type{}.TerraformType(ctx)
attrTypes["max_through_in_mb"] = basetypes.Int32Type{}.TerraformType(ctx)
attrTypes["max_io_per_sec"] = basetypes.Int64Type{}.TerraformType(ctx)
attrTypes["max_through_in_mb"] = basetypes.Int64Type{}.TerraformType(ctx)
objectType := tftypes.Object{AttributeTypes: attrTypes}
@ -1293,8 +1293,8 @@ func (v StorageClassesValue) ToObjectValue(ctx context.Context) (basetypes.Objec
attributeTypes := map[string]attr.Type{
"class": basetypes.StringType{},
"max_io_per_sec": basetypes.Int32Type{},
"max_through_in_mb": basetypes.Int32Type{},
"max_io_per_sec": basetypes.Int64Type{},
"max_through_in_mb": basetypes.Int64Type{},
}
if v.IsNull() {
@ -1357,8 +1357,8 @@ func (v StorageClassesValue) Type(ctx context.Context) attr.Type {
func (v StorageClassesValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
return map[string]attr.Type{
"class": basetypes.StringType{},
"max_io_per_sec": basetypes.Int32Type{},
"max_through_in_mb": basetypes.Int32Type{},
"max_io_per_sec": basetypes.Int64Type{},
"max_through_in_mb": basetypes.Int64Type{},
}
}
@ -1397,12 +1397,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
return nil, diags
}
pageVal, ok := pageAttribute.(basetypes.Int32Value)
pageVal, ok := pageAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`page expected to be basetypes.Int32Value, was: %T`, pageAttribute))
fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute))
}
sizeAttribute, ok := attributes["size"]
@ -1415,12 +1415,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
return nil, diags
}
sizeVal, ok := sizeAttribute.(basetypes.Int32Value)
sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`size expected to be basetypes.Int32Value, was: %T`, sizeAttribute))
fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
}
sortAttribute, ok := attributes["sort"]
@ -1451,12 +1451,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
return nil, diags
}
totalPagesVal, ok := totalPagesAttribute.(basetypes.Int32Value)
totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`total_pages expected to be basetypes.Int32Value, was: %T`, totalPagesAttribute))
fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute))
}
totalRowsAttribute, ok := attributes["total_rows"]
@ -1469,12 +1469,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
return nil, diags
}
totalRowsVal, ok := totalRowsAttribute.(basetypes.Int32Value)
totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`total_rows expected to be basetypes.Int32Value, was: %T`, totalRowsAttribute))
fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute))
}
if diags.HasError() {
@ -1564,12 +1564,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
return NewPaginationValueUnknown(), diags
}
pageVal, ok := pageAttribute.(basetypes.Int32Value)
pageVal, ok := pageAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`page expected to be basetypes.Int32Value, was: %T`, pageAttribute))
fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute))
}
sizeAttribute, ok := attributes["size"]
@ -1582,12 +1582,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
return NewPaginationValueUnknown(), diags
}
sizeVal, ok := sizeAttribute.(basetypes.Int32Value)
sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`size expected to be basetypes.Int32Value, was: %T`, sizeAttribute))
fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
}
sortAttribute, ok := attributes["sort"]
@ -1618,12 +1618,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
return NewPaginationValueUnknown(), diags
}
totalPagesVal, ok := totalPagesAttribute.(basetypes.Int32Value)
totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`total_pages expected to be basetypes.Int32Value, was: %T`, totalPagesAttribute))
fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute))
}
totalRowsAttribute, ok := attributes["total_rows"]
@ -1636,12 +1636,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
return NewPaginationValueUnknown(), diags
}
totalRowsVal, ok := totalRowsAttribute.(basetypes.Int32Value)
totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`total_rows expected to be basetypes.Int32Value, was: %T`, totalRowsAttribute))
fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute))
}
if diags.HasError() {
@ -1726,11 +1726,11 @@ func (t PaginationType) ValueType(ctx context.Context) attr.Value {
var _ basetypes.ObjectValuable = PaginationValue{}
type PaginationValue struct {
Page basetypes.Int32Value `tfsdk:"page"`
Size basetypes.Int32Value `tfsdk:"size"`
Page basetypes.Int64Value `tfsdk:"page"`
Size basetypes.Int64Value `tfsdk:"size"`
Sort basetypes.StringValue `tfsdk:"sort"`
TotalPages basetypes.Int32Value `tfsdk:"total_pages"`
TotalRows basetypes.Int32Value `tfsdk:"total_rows"`
TotalPages basetypes.Int64Value `tfsdk:"total_pages"`
TotalRows basetypes.Int64Value `tfsdk:"total_rows"`
state attr.ValueState
}
@ -1740,11 +1740,11 @@ func (v PaginationValue) ToTerraformValue(ctx context.Context) (tftypes.Value, e
var val tftypes.Value
var err error
attrTypes["page"] = basetypes.Int32Type{}.TerraformType(ctx)
attrTypes["size"] = basetypes.Int32Type{}.TerraformType(ctx)
attrTypes["page"] = basetypes.Int64Type{}.TerraformType(ctx)
attrTypes["size"] = basetypes.Int64Type{}.TerraformType(ctx)
attrTypes["sort"] = basetypes.StringType{}.TerraformType(ctx)
attrTypes["total_pages"] = basetypes.Int32Type{}.TerraformType(ctx)
attrTypes["total_rows"] = basetypes.Int32Type{}.TerraformType(ctx)
attrTypes["total_pages"] = basetypes.Int64Type{}.TerraformType(ctx)
attrTypes["total_rows"] = basetypes.Int64Type{}.TerraformType(ctx)
objectType := tftypes.Object{AttributeTypes: attrTypes}
@ -1822,11 +1822,11 @@ func (v PaginationValue) ToObjectValue(ctx context.Context) (basetypes.ObjectVal
var diags diag.Diagnostics
attributeTypes := map[string]attr.Type{
"page": basetypes.Int32Type{},
"size": basetypes.Int32Type{},
"page": basetypes.Int64Type{},
"size": basetypes.Int64Type{},
"sort": basetypes.StringType{},
"total_pages": basetypes.Int32Type{},
"total_rows": basetypes.Int32Type{},
"total_pages": basetypes.Int64Type{},
"total_rows": basetypes.Int64Type{},
}
if v.IsNull() {
@ -1898,10 +1898,10 @@ func (v PaginationValue) Type(ctx context.Context) attr.Type {
func (v PaginationValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
return map[string]attr.Type{
"page": basetypes.Int32Type{},
"size": basetypes.Int32Type{},
"page": basetypes.Int64Type{},
"size": basetypes.Int64Type{},
"sort": basetypes.StringType{},
"total_pages": basetypes.Int32Type{},
"total_rows": basetypes.Int32Type{},
"total_pages": basetypes.Int64Type{},
"total_rows": basetypes.Int64Type{},
}
}

View file

@ -40,7 +40,7 @@ func InstanceDataSourceSchema(ctx context.Context) schema.Schema {
Description: "The host of the instance.",
MarkdownDescription: "The host of the instance.",
},
"port": schema.Int32Attribute{
"port": schema.Int64Attribute{
Computed: true,
Description: "The port of the instance.",
MarkdownDescription: "The port of the instance.",
@ -164,12 +164,12 @@ func InstanceDataSourceSchema(ctx context.Context) schema.Schema {
),
},
},
"replicas": schema.Int32Attribute{
"replicas": schema.Int64Attribute{
Computed: true,
Description: "How many replicas the instance should have.",
MarkdownDescription: "How many replicas the instance should have.",
},
"retention_days": schema.Int32Attribute{
"retention_days": schema.Int64Attribute{
Computed: true,
Description: "How long backups are retained. The value can only be between 32 and 365 days.",
MarkdownDescription: "How long backups are retained. The value can only be between 32 and 365 days.",
@ -186,7 +186,7 @@ func InstanceDataSourceSchema(ctx context.Context) schema.Schema {
Description: "The storage class for the storage.",
MarkdownDescription: "The storage class for the storage.",
},
"size": schema.Int32Attribute{
"size": schema.Int64Attribute{
Computed: true,
Description: "The storage size in Gigabytes.",
MarkdownDescription: "The storage size in Gigabytes.",
@ -223,8 +223,8 @@ type InstanceModel struct {
Network NetworkValue `tfsdk:"network"`
ProjectId types.String `tfsdk:"project_id"`
Region types.String `tfsdk:"region"`
Replicas types.Int32 `tfsdk:"replicas"`
RetentionDays types.Int32 `tfsdk:"retention_days"`
Replicas types.Int64 `tfsdk:"replicas"`
RetentionDays types.Int64 `tfsdk:"retention_days"`
Status types.String `tfsdk:"status"`
Storage StorageValue `tfsdk:"storage"`
Version types.String `tfsdk:"version"`
@ -634,12 +634,12 @@ func (t WriteType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue
return nil, diags
}
portVal, ok := portAttribute.(basetypes.Int32Value)
portVal, ok := portAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`port expected to be basetypes.Int32Value, was: %T`, portAttribute))
fmt.Sprintf(`port expected to be basetypes.Int64Value, was: %T`, portAttribute))
}
if diags.HasError() {
@ -744,12 +744,12 @@ func NewWriteValue(attributeTypes map[string]attr.Type, attributes map[string]at
return NewWriteValueUnknown(), diags
}
portVal, ok := portAttribute.(basetypes.Int32Value)
portVal, ok := portAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`port expected to be basetypes.Int32Value, was: %T`, portAttribute))
fmt.Sprintf(`port expected to be basetypes.Int64Value, was: %T`, portAttribute))
}
if diags.HasError() {
@ -832,7 +832,7 @@ var _ basetypes.ObjectValuable = WriteValue{}
type WriteValue struct {
Host basetypes.StringValue `tfsdk:"host"`
Port basetypes.Int32Value `tfsdk:"port"`
Port basetypes.Int64Value `tfsdk:"port"`
state attr.ValueState
}
@ -843,7 +843,7 @@ func (v WriteValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error)
var err error
attrTypes["host"] = basetypes.StringType{}.TerraformType(ctx)
attrTypes["port"] = basetypes.Int32Type{}.TerraformType(ctx)
attrTypes["port"] = basetypes.Int64Type{}.TerraformType(ctx)
objectType := tftypes.Object{AttributeTypes: attrTypes}
@ -898,7 +898,7 @@ func (v WriteValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, d
attributeTypes := map[string]attr.Type{
"host": basetypes.StringType{},
"port": basetypes.Int32Type{},
"port": basetypes.Int64Type{},
}
if v.IsNull() {
@ -956,7 +956,7 @@ func (v WriteValue) Type(ctx context.Context) attr.Type {
func (v WriteValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
return map[string]attr.Type{
"host": basetypes.StringType{},
"port": basetypes.Int32Type{},
"port": basetypes.Int64Type{},
}
}
@ -2020,12 +2020,12 @@ func (t StorageType) ValueFromObject(ctx context.Context, in basetypes.ObjectVal
return nil, diags
}
sizeVal, ok := sizeAttribute.(basetypes.Int32Value)
sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`size expected to be basetypes.Int32Value, was: %T`, sizeAttribute))
fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
}
if diags.HasError() {
@ -2130,12 +2130,12 @@ func NewStorageValue(attributeTypes map[string]attr.Type, attributes map[string]
return NewStorageValueUnknown(), diags
}
sizeVal, ok := sizeAttribute.(basetypes.Int32Value)
sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`size expected to be basetypes.Int32Value, was: %T`, sizeAttribute))
fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
}
if diags.HasError() {
@ -2218,7 +2218,7 @@ var _ basetypes.ObjectValuable = StorageValue{}
type StorageValue struct {
PerformanceClass basetypes.StringValue `tfsdk:"performance_class"`
Size basetypes.Int32Value `tfsdk:"size"`
Size basetypes.Int64Value `tfsdk:"size"`
state attr.ValueState
}
@ -2229,7 +2229,7 @@ func (v StorageValue) ToTerraformValue(ctx context.Context) (tftypes.Value, erro
var err error
attrTypes["performance_class"] = basetypes.StringType{}.TerraformType(ctx)
attrTypes["size"] = basetypes.Int32Type{}.TerraformType(ctx)
attrTypes["size"] = basetypes.Int64Type{}.TerraformType(ctx)
objectType := tftypes.Object{AttributeTypes: attrTypes}
@ -2284,7 +2284,7 @@ func (v StorageValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue,
attributeTypes := map[string]attr.Type{
"performance_class": basetypes.StringType{},
"size": basetypes.Int32Type{},
"size": basetypes.Int64Type{},
}
if v.IsNull() {
@ -2342,6 +2342,6 @@ func (v StorageValue) Type(ctx context.Context) attr.Type {
func (v StorageValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
return map[string]attr.Type{
"performance_class": basetypes.StringType{},
"size": basetypes.Int32Type{},
"size": basetypes.Int64Type{},
}
}

View file

@ -54,7 +54,7 @@ func InstancesDataSourceSchema(ctx context.Context) schema.Schema {
Description: "List of owned instances and their current status.",
MarkdownDescription: "List of owned instances and their current status.",
},
"page": schema.Int32Attribute{
"page": schema.Int64Attribute{
Optional: true,
Computed: true,
Description: "Number of the page of items list to be returned.",
@ -62,19 +62,19 @@ func InstancesDataSourceSchema(ctx context.Context) schema.Schema {
},
"pagination": schema.SingleNestedAttribute{
Attributes: map[string]schema.Attribute{
"page": schema.Int32Attribute{
"page": schema.Int64Attribute{
Computed: true,
},
"size": schema.Int32Attribute{
"size": schema.Int64Attribute{
Computed: true,
},
"sort": schema.StringAttribute{
Computed: true,
},
"total_pages": schema.Int32Attribute{
"total_pages": schema.Int64Attribute{
Computed: true,
},
"total_rows": schema.Int32Attribute{
"total_rows": schema.Int64Attribute{
Computed: true,
},
},
@ -100,7 +100,7 @@ func InstancesDataSourceSchema(ctx context.Context) schema.Schema {
),
},
},
"size": schema.Int32Attribute{
"size": schema.Int64Attribute{
Optional: true,
Computed: true,
Description: "Number of items to be returned on each page.",
@ -130,11 +130,11 @@ func InstancesDataSourceSchema(ctx context.Context) schema.Schema {
type InstancesModel struct {
Instances types.List `tfsdk:"instances"`
Page types.Int32 `tfsdk:"page"`
Page types.Int64 `tfsdk:"page"`
Pagination PaginationValue `tfsdk:"pagination"`
ProjectId types.String `tfsdk:"project_id"`
Region types.String `tfsdk:"region"`
Size types.Int32 `tfsdk:"size"`
Size types.Int64 `tfsdk:"size"`
Sort types.String `tfsdk:"sort"`
}
@ -662,12 +662,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
return nil, diags
}
pageVal, ok := pageAttribute.(basetypes.Int32Value)
pageVal, ok := pageAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`page expected to be basetypes.Int32Value, was: %T`, pageAttribute))
fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute))
}
sizeAttribute, ok := attributes["size"]
@ -680,12 +680,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
return nil, diags
}
sizeVal, ok := sizeAttribute.(basetypes.Int32Value)
sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`size expected to be basetypes.Int32Value, was: %T`, sizeAttribute))
fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
}
sortAttribute, ok := attributes["sort"]
@ -716,12 +716,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
return nil, diags
}
totalPagesVal, ok := totalPagesAttribute.(basetypes.Int32Value)
totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`total_pages expected to be basetypes.Int32Value, was: %T`, totalPagesAttribute))
fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute))
}
totalRowsAttribute, ok := attributes["total_rows"]
@ -734,12 +734,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
return nil, diags
}
totalRowsVal, ok := totalRowsAttribute.(basetypes.Int32Value)
totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`total_rows expected to be basetypes.Int32Value, was: %T`, totalRowsAttribute))
fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute))
}
if diags.HasError() {
@ -829,12 +829,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
return NewPaginationValueUnknown(), diags
}
pageVal, ok := pageAttribute.(basetypes.Int32Value)
pageVal, ok := pageAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`page expected to be basetypes.Int32Value, was: %T`, pageAttribute))
fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute))
}
sizeAttribute, ok := attributes["size"]
@ -847,12 +847,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
return NewPaginationValueUnknown(), diags
}
sizeVal, ok := sizeAttribute.(basetypes.Int32Value)
sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`size expected to be basetypes.Int32Value, was: %T`, sizeAttribute))
fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
}
sortAttribute, ok := attributes["sort"]
@ -883,12 +883,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
return NewPaginationValueUnknown(), diags
}
totalPagesVal, ok := totalPagesAttribute.(basetypes.Int32Value)
totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`total_pages expected to be basetypes.Int32Value, was: %T`, totalPagesAttribute))
fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute))
}
totalRowsAttribute, ok := attributes["total_rows"]
@ -901,12 +901,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
return NewPaginationValueUnknown(), diags
}
totalRowsVal, ok := totalRowsAttribute.(basetypes.Int32Value)
totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`total_rows expected to be basetypes.Int32Value, was: %T`, totalRowsAttribute))
fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute))
}
if diags.HasError() {
@ -991,11 +991,11 @@ func (t PaginationType) ValueType(ctx context.Context) attr.Value {
var _ basetypes.ObjectValuable = PaginationValue{}
type PaginationValue struct {
Page basetypes.Int32Value `tfsdk:"page"`
Size basetypes.Int32Value `tfsdk:"size"`
Page basetypes.Int64Value `tfsdk:"page"`
Size basetypes.Int64Value `tfsdk:"size"`
Sort basetypes.StringValue `tfsdk:"sort"`
TotalPages basetypes.Int32Value `tfsdk:"total_pages"`
TotalRows basetypes.Int32Value `tfsdk:"total_rows"`
TotalPages basetypes.Int64Value `tfsdk:"total_pages"`
TotalRows basetypes.Int64Value `tfsdk:"total_rows"`
state attr.ValueState
}
@ -1005,11 +1005,11 @@ func (v PaginationValue) ToTerraformValue(ctx context.Context) (tftypes.Value, e
var val tftypes.Value
var err error
attrTypes["page"] = basetypes.Int32Type{}.TerraformType(ctx)
attrTypes["size"] = basetypes.Int32Type{}.TerraformType(ctx)
attrTypes["page"] = basetypes.Int64Type{}.TerraformType(ctx)
attrTypes["size"] = basetypes.Int64Type{}.TerraformType(ctx)
attrTypes["sort"] = basetypes.StringType{}.TerraformType(ctx)
attrTypes["total_pages"] = basetypes.Int32Type{}.TerraformType(ctx)
attrTypes["total_rows"] = basetypes.Int32Type{}.TerraformType(ctx)
attrTypes["total_pages"] = basetypes.Int64Type{}.TerraformType(ctx)
attrTypes["total_rows"] = basetypes.Int64Type{}.TerraformType(ctx)
objectType := tftypes.Object{AttributeTypes: attrTypes}
@ -1087,11 +1087,11 @@ func (v PaginationValue) ToObjectValue(ctx context.Context) (basetypes.ObjectVal
var diags diag.Diagnostics
attributeTypes := map[string]attr.Type{
"page": basetypes.Int32Type{},
"size": basetypes.Int32Type{},
"page": basetypes.Int64Type{},
"size": basetypes.Int64Type{},
"sort": basetypes.StringType{},
"total_pages": basetypes.Int32Type{},
"total_rows": basetypes.Int32Type{},
"total_pages": basetypes.Int64Type{},
"total_rows": basetypes.Int64Type{},
}
if v.IsNull() {
@ -1163,10 +1163,10 @@ func (v PaginationValue) Type(ctx context.Context) attr.Type {
func (v PaginationValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
return map[string]attr.Type{
"page": basetypes.Int32Type{},
"size": basetypes.Int32Type{},
"page": basetypes.Int64Type{},
"size": basetypes.Int64Type{},
"sort": basetypes.StringType{},
"total_pages": basetypes.Int32Type{},
"total_rows": basetypes.Int32Type{},
"total_pages": basetypes.Int64Type{},
"total_rows": basetypes.Int64Type{},
}
}

View file

@ -41,11 +41,13 @@ func mapGetInstanceResponseToModel(
m.ConnectionInfo = postgresflexalpharesource.NewConnectionInfoValueMust(
postgresflexalpharesource.ConnectionInfoValue{}.AttributeTypes(ctx),
map[string]attr.Value{
"write": postgresflexalpharesource.NewWriteValueMust(
// careful - we can not use NewWriteValueMust here
"write": basetypes.NewObjectValueMust(
postgresflexalpharesource.WriteValue{}.AttributeTypes(ctx),
map[string]attr.Value{
"host": types.StringValue(resp.ConnectionInfo.Write.Host),
"port": types.Int32Value(resp.ConnectionInfo.Write.Port),
// note: IDE does not show that port is actually an int64 in the Schema
"port": types.Int64Value(int64(resp.ConnectionInfo.Write.Port)),
},
),
},
@ -95,8 +97,8 @@ func mapGetInstanceResponseToModel(
}
m.Network = net
m.Replicas = types.Int32Value(int32(resp.GetReplicas()))
m.RetentionDays = types.Int32Value(resp.GetRetentionDays())
m.Replicas = types.Int64Value(int64(resp.GetReplicas()))
m.RetentionDays = types.Int64Value(int64(resp.GetRetentionDays()))
m.Name = types.StringValue(resp.GetName())
@ -106,7 +108,7 @@ func mapGetInstanceResponseToModel(
postgresflexalpharesource.StorageValue{}.AttributeTypes(ctx),
map[string]attr.Value{
"performance_class": types.StringValue(resp.Storage.GetPerformanceClass()),
"size": types.Int32Value(resp.Storage.GetSize()),
"size": types.Int64Value(int64(resp.Storage.GetSize())),
},
)
if diags.HasError() {
@ -138,14 +140,14 @@ func mapGetDataInstanceResponseToModel(
return err
}
m.Replicas = types.Int32Value(int32(resp.GetReplicas()))
m.RetentionDays = types.Int32Value(resp.GetRetentionDays())
m.Replicas = types.Int64Value(int64(resp.GetReplicas()))
m.RetentionDays = types.Int64Value(int64(resp.GetRetentionDays()))
m.Status = types.StringValue(string(resp.GetStatus()))
storage, diags := postgresflexalphadatasource.NewStorageValue(
postgresflexalphadatasource.StorageValue{}.AttributeTypes(ctx),
map[string]attr.Value{
"performance_class": types.StringValue(resp.Storage.GetPerformanceClass()),
"size": types.Int32Value(resp.Storage.GetSize()),
"size": types.Int64Value(int64(resp.Storage.GetSize())),
},
)
if diags.HasError() {

View file

@ -13,6 +13,7 @@ import (
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-log/tflog"
"github.com/stackitcloud/stackit-sdk-go/core/oapierror"
coreUtils "github.com/stackitcloud/stackit-sdk-go/core/utils"
"github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
@ -206,7 +207,7 @@ func (r *instanceResource) Create(
return
}
replVal := model.Replicas.ValueInt32() // nolint:gosec // check is performed above
replVal := model.Replicas.ValueInt64() // nolint:gosec // check is performed above
payload := modelToCreateInstancePayload(netAcl, model, replVal)
// Create new instance
@ -221,7 +222,7 @@ func (r *instanceResource) Create(
}
ctx = core.LogResponse(ctx)
instanceId, ok := createResp.GetIdOk()
instanceID, ok := createResp.GetIdOk()
if !ok {
core.LogAndAddError(ctx, &resp.Diagnostics, "error creating instance", "could not find instance id in response")
return
@ -231,14 +232,14 @@ func (r *instanceResource) Create(
identity := InstanceResourceIdentityModel{
ProjectID: types.StringValue(projectId),
Region: types.StringValue(region),
InstanceID: types.StringPointerValue(instanceId),
InstanceID: types.StringPointerValue(instanceID),
}
resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
if resp.Diagnostics.HasError() {
return
}
waitResp, err := wait.CreateInstanceWaitHandler(ctx, r.client.DefaultAPI, projectId, region, *instanceId).
waitResp, err := wait.CreateInstanceWaitHandler(ctx, r.client.DefaultAPI, projectId, region, *instanceID).
WaitWithContext(ctx)
if err != nil {
core.LogAndAddError(
@ -271,9 +272,9 @@ func (r *instanceResource) Create(
}
func modelToCreateInstancePayload(
netAcl []string,
netACL []string,
model postgresflexalpha.InstanceModel,
replVal int32,
replVal int64,
) v3alpha1api.CreateInstanceRequestPayload {
var enc *v3alpha1api.InstanceEncryption
if !model.Encryption.IsNull() && !model.Encryption.IsUnknown() {
@ -291,13 +292,13 @@ func modelToCreateInstancePayload(
Name: model.Name.ValueString(),
Network: v3alpha1api.InstanceNetworkCreate{
AccessScope: (*v3alpha1api.InstanceNetworkAccessScope)(model.Network.AccessScope.ValueStringPointer()),
Acl: netAcl,
Acl: netACL,
},
Replicas: v3alpha1api.Replicas(replVal),
RetentionDays: model.RetentionDays.ValueInt32(),
RetentionDays: int32(model.RetentionDays.ValueInt64()),
Storage: v3alpha1api.StorageCreate{
PerformanceClass: model.Storage.PerformanceClass.ValueString(),
Size: model.Storage.Size.ValueInt32(),
Size: int32(model.Storage.Size.ValueInt64()),
},
Version: model.Version.ValueString(),
}
@ -438,7 +439,7 @@ func (r *instanceResource) Update(
return
}
replInt32 := model.Replicas.ValueInt32()
replInt32 := model.Replicas.ValueInt64()
payload := v3alpha1api.UpdateInstanceRequestPayload{
BackupSchedule: model.BackupSchedule.ValueString(),
FlavorId: model.FlavorId.ValueString(),
@ -447,9 +448,9 @@ func (r *instanceResource) Update(
Acl: netAcl,
},
Replicas: v3alpha1api.Replicas(replInt32),
RetentionDays: model.RetentionDays.ValueInt32(),
RetentionDays: int32(model.RetentionDays.ValueInt64()),
Storage: v3alpha1api.StorageUpdate{
Size: model.Storage.Size.ValueInt32Pointer(),
Size: coreUtils.Ptr(int32(model.Storage.Size.ValueInt64())),
},
Version: model.Version.ValueString(),
}

View file

@ -5,7 +5,7 @@ package postgresflexalpha
import (
"context"
"fmt"
"github.com/hashicorp/terraform-plugin-framework-validators/int32validator"
"github.com/hashicorp/terraform-plugin-framework-validators/int64validator"
"github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/attr"
"github.com/hashicorp/terraform-plugin-framework/diag"
@ -42,7 +42,7 @@ func InstanceResourceSchema(ctx context.Context) schema.Schema {
Description: "The host of the instance.",
MarkdownDescription: "The host of the instance.",
},
"port": schema.Int32Attribute{
"port": schema.Int64Attribute{
Computed: true,
Description: "The port of the instance.",
MarkdownDescription: "The port of the instance.",
@ -178,18 +178,18 @@ func InstanceResourceSchema(ctx context.Context) schema.Schema {
),
},
},
"replicas": schema.Int32Attribute{
"replicas": schema.Int64Attribute{
Required: true,
Description: "How many replicas the instance should have.",
MarkdownDescription: "How many replicas the instance should have.",
Validators: []validator.Int32{
int32validator.OneOf(
Validators: []validator.Int64{
int64validator.OneOf(
1,
3,
),
},
},
"retention_days": schema.Int32Attribute{
"retention_days": schema.Int64Attribute{
Required: true,
Description: "How long backups are retained. The value can only be between 32 and 365 days.",
MarkdownDescription: "How long backups are retained. The value can only be between 32 and 365 days.",
@ -206,7 +206,7 @@ func InstanceResourceSchema(ctx context.Context) schema.Schema {
Description: "The storage class for the storage.",
MarkdownDescription: "The storage class for the storage.",
},
"size": schema.Int32Attribute{
"size": schema.Int64Attribute{
Required: true,
Description: "The storage size in Gigabytes.",
MarkdownDescription: "The storage size in Gigabytes.",
@ -243,8 +243,8 @@ type InstanceModel struct {
Network NetworkValue `tfsdk:"network"`
ProjectId types.String `tfsdk:"project_id"`
Region types.String `tfsdk:"region"`
Replicas types.Int32 `tfsdk:"replicas"`
RetentionDays types.Int32 `tfsdk:"retention_days"`
Replicas types.Int64 `tfsdk:"replicas"`
RetentionDays types.Int64 `tfsdk:"retention_days"`
Status types.String `tfsdk:"status"`
Storage StorageValue `tfsdk:"storage"`
Version types.String `tfsdk:"version"`
@ -654,12 +654,12 @@ func (t WriteType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue
return nil, diags
}
portVal, ok := portAttribute.(basetypes.Int32Value)
portVal, ok := portAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`port expected to be basetypes.Int32Value, was: %T`, portAttribute))
fmt.Sprintf(`port expected to be basetypes.Int64Value, was: %T`, portAttribute))
}
if diags.HasError() {
@ -764,12 +764,12 @@ func NewWriteValue(attributeTypes map[string]attr.Type, attributes map[string]at
return NewWriteValueUnknown(), diags
}
portVal, ok := portAttribute.(basetypes.Int32Value)
portVal, ok := portAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`port expected to be basetypes.Int32Value, was: %T`, portAttribute))
fmt.Sprintf(`port expected to be basetypes.Int64Value, was: %T`, portAttribute))
}
if diags.HasError() {
@ -852,7 +852,7 @@ var _ basetypes.ObjectValuable = WriteValue{}
type WriteValue struct {
Host basetypes.StringValue `tfsdk:"host"`
Port basetypes.Int32Value `tfsdk:"port"`
Port basetypes.Int64Value `tfsdk:"port"`
state attr.ValueState
}
@ -863,7 +863,7 @@ func (v WriteValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error)
var err error
attrTypes["host"] = basetypes.StringType{}.TerraformType(ctx)
attrTypes["port"] = basetypes.Int32Type{}.TerraformType(ctx)
attrTypes["port"] = basetypes.Int64Type{}.TerraformType(ctx)
objectType := tftypes.Object{AttributeTypes: attrTypes}
@ -918,7 +918,7 @@ func (v WriteValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, d
attributeTypes := map[string]attr.Type{
"host": basetypes.StringType{},
"port": basetypes.Int32Type{},
"port": basetypes.Int64Type{},
}
if v.IsNull() {
@ -976,7 +976,7 @@ func (v WriteValue) Type(ctx context.Context) attr.Type {
func (v WriteValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
return map[string]attr.Type{
"host": basetypes.StringType{},
"port": basetypes.Int32Type{},
"port": basetypes.Int64Type{},
}
}
@ -2040,12 +2040,12 @@ func (t StorageType) ValueFromObject(ctx context.Context, in basetypes.ObjectVal
return nil, diags
}
sizeVal, ok := sizeAttribute.(basetypes.Int32Value)
sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`size expected to be basetypes.Int32Value, was: %T`, sizeAttribute))
fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
}
if diags.HasError() {
@ -2150,12 +2150,12 @@ func NewStorageValue(attributeTypes map[string]attr.Type, attributes map[string]
return NewStorageValueUnknown(), diags
}
sizeVal, ok := sizeAttribute.(basetypes.Int32Value)
sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`size expected to be basetypes.Int32Value, was: %T`, sizeAttribute))
fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
}
if diags.HasError() {
@ -2238,7 +2238,7 @@ var _ basetypes.ObjectValuable = StorageValue{}
type StorageValue struct {
PerformanceClass basetypes.StringValue `tfsdk:"performance_class"`
Size basetypes.Int32Value `tfsdk:"size"`
Size basetypes.Int64Value `tfsdk:"size"`
state attr.ValueState
}
@ -2249,7 +2249,7 @@ func (v StorageValue) ToTerraformValue(ctx context.Context) (tftypes.Value, erro
var err error
attrTypes["performance_class"] = basetypes.StringType{}.TerraformType(ctx)
attrTypes["size"] = basetypes.Int32Type{}.TerraformType(ctx)
attrTypes["size"] = basetypes.Int64Type{}.TerraformType(ctx)
objectType := tftypes.Object{AttributeTypes: attrTypes}
@ -2304,7 +2304,7 @@ func (v StorageValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue,
attributeTypes := map[string]attr.Type{
"performance_class": basetypes.StringType{},
"size": basetypes.Int32Type{},
"size": basetypes.Int64Type{},
}
if v.IsNull() {
@ -2362,6 +2362,6 @@ func (v StorageValue) Type(ctx context.Context) attr.Type {
func (v StorageValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
return map[string]attr.Type{
"performance_class": basetypes.StringType{},
"size": basetypes.Int32Type{},
"size": basetypes.Int64Type{},
}
}

View file

@ -101,24 +101,24 @@ func (r *userDataSource) Read(
ctx = core.InitProviderContext(ctx)
projectId := model.ProjectId.ValueString()
instanceId := model.InstanceId.ValueString()
userId64 := model.UserId.ValueInt32()
if userId64 > math.MaxInt32 {
projectID := model.ProjectId.ValueString()
instanceID := model.InstanceId.ValueString()
userID64 := model.UserId.ValueInt64()
if userID64 > math.MaxInt32 {
core.LogAndAddError(ctx, &resp.Diagnostics, "Error in type conversion", "int value too large (userId)")
return
}
userId := int32(userId64) // nolint:gosec // check is performed above
userID := int32(userID64) // nolint:gosec // check is performed above
region := r.providerData.GetRegionWithOverride(model.Region)
ctx = tflog.SetField(ctx, "project_id", projectId)
ctx = tflog.SetField(ctx, "instance_id", instanceId)
ctx = tflog.SetField(ctx, "project_id", projectID)
ctx = tflog.SetField(ctx, "instance_id", instanceID)
ctx = tflog.SetField(ctx, "region", region)
ctx = tflog.SetField(ctx, "user_id", userId)
ctx = tflog.SetField(ctx, "user_id", userID)
recordSetResp, err := r.client.DefaultAPI.GetUserRequest(ctx, projectId, region, instanceId, userId).Execute()
recordSetResp, err := r.client.DefaultAPI.GetUserRequest(ctx, projectID, region, instanceID, userID).Execute()
if err != nil {
handleReadError(ctx, &diags, err, projectId, instanceId, userId)
handleReadError(ctx, &diags, err, projectID, instanceID, userID)
resp.State.RemoveResource(ctx)
return
}
@ -151,8 +151,8 @@ func handleReadError(
ctx context.Context,
diags *diag.Diagnostics,
err error,
projectId, instanceId string,
userId int32,
projectID, instanceID string,
userID int32,
) {
utils.LogError(
ctx,
@ -161,23 +161,23 @@ func handleReadError(
"Reading user",
fmt.Sprintf(
"User with ID %q or instance with ID %q does not exist in project %q.",
userId,
instanceId,
projectId,
userID,
instanceID,
projectID,
),
map[int]string{
http.StatusBadRequest: fmt.Sprintf(
"Invalid user request parameters for project %q and instance %q.",
projectId,
instanceId,
projectID,
instanceID,
),
http.StatusNotFound: fmt.Sprintf(
"User, instance %q, or project %q or user %q not found.",
instanceId,
projectId,
userId,
instanceID,
projectID,
userID,
),
http.StatusForbidden: fmt.Sprintf("Forbidden access to project %q.", projectId),
http.StatusForbidden: fmt.Sprintf("Forbidden access to project %q.", projectID),
},
)
}

View file

@ -14,7 +14,7 @@ import (
func UserDataSourceSchema(ctx context.Context) schema.Schema {
return schema.Schema{
Attributes: map[string]schema.Attribute{
"tf_original_api_id": schema.Int32Attribute{
"tf_original_api_id": schema.Int64Attribute{
Computed: true,
Description: "The ID of the user.",
MarkdownDescription: "The ID of the user.",
@ -55,7 +55,7 @@ func UserDataSourceSchema(ctx context.Context) schema.Schema {
Description: "The current status of the user.",
MarkdownDescription: "The current status of the user.",
},
"user_id": schema.Int32Attribute{
"user_id": schema.Int64Attribute{
Required: true,
Description: "The ID of the user.",
MarkdownDescription: "The ID of the user.",
@ -65,12 +65,12 @@ func UserDataSourceSchema(ctx context.Context) schema.Schema {
}
type UserModel struct {
Id types.Int32 `tfsdk:"tf_original_api_id"`
Id types.Int64 `tfsdk:"tf_original_api_id"`
InstanceId types.String `tfsdk:"instance_id"`
Name types.String `tfsdk:"name"`
ProjectId types.String `tfsdk:"project_id"`
Region types.String `tfsdk:"region"`
Roles types.List `tfsdk:"roles"`
Status types.String `tfsdk:"status"`
UserId types.Int32 `tfsdk:"user_id"`
UserId types.Int64 `tfsdk:"user_id"`
}

View file

@ -25,7 +25,7 @@ func UsersDataSourceSchema(ctx context.Context) schema.Schema {
Description: "The ID of the instance.",
MarkdownDescription: "The ID of the instance.",
},
"page": schema.Int32Attribute{
"page": schema.Int64Attribute{
Optional: true,
Computed: true,
Description: "Number of the page of items list to be returned.",
@ -33,19 +33,19 @@ func UsersDataSourceSchema(ctx context.Context) schema.Schema {
},
"pagination": schema.SingleNestedAttribute{
Attributes: map[string]schema.Attribute{
"page": schema.Int32Attribute{
"page": schema.Int64Attribute{
Computed: true,
},
"size": schema.Int32Attribute{
"size": schema.Int64Attribute{
Computed: true,
},
"sort": schema.StringAttribute{
Computed: true,
},
"total_pages": schema.Int32Attribute{
"total_pages": schema.Int64Attribute{
Computed: true,
},
"total_rows": schema.Int32Attribute{
"total_rows": schema.Int64Attribute{
Computed: true,
},
},
@ -71,7 +71,7 @@ func UsersDataSourceSchema(ctx context.Context) schema.Schema {
),
},
},
"size": schema.Int32Attribute{
"size": schema.Int64Attribute{
Optional: true,
Computed: true,
Description: "Number of items to be returned on each page.",
@ -96,7 +96,7 @@ func UsersDataSourceSchema(ctx context.Context) schema.Schema {
"users": schema.ListNestedAttribute{
NestedObject: schema.NestedAttributeObject{
Attributes: map[string]schema.Attribute{
"id": schema.Int32Attribute{
"id": schema.Int64Attribute{
Computed: true,
Description: "The ID of the user.",
MarkdownDescription: "The ID of the user.",
@ -128,11 +128,11 @@ func UsersDataSourceSchema(ctx context.Context) schema.Schema {
type UsersModel struct {
InstanceId types.String `tfsdk:"instance_id"`
Page types.Int32 `tfsdk:"page"`
Page types.Int64 `tfsdk:"page"`
Pagination PaginationValue `tfsdk:"pagination"`
ProjectId types.String `tfsdk:"project_id"`
Region types.String `tfsdk:"region"`
Size types.Int32 `tfsdk:"size"`
Size types.Int64 `tfsdk:"size"`
Sort types.String `tfsdk:"sort"`
Users types.List `tfsdk:"users"`
}
@ -172,12 +172,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
return nil, diags
}
pageVal, ok := pageAttribute.(basetypes.Int32Value)
pageVal, ok := pageAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`page expected to be basetypes.Int32Value, was: %T`, pageAttribute))
fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute))
}
sizeAttribute, ok := attributes["size"]
@ -190,12 +190,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
return nil, diags
}
sizeVal, ok := sizeAttribute.(basetypes.Int32Value)
sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`size expected to be basetypes.Int32Value, was: %T`, sizeAttribute))
fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
}
sortAttribute, ok := attributes["sort"]
@ -226,12 +226,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
return nil, diags
}
totalPagesVal, ok := totalPagesAttribute.(basetypes.Int32Value)
totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`total_pages expected to be basetypes.Int32Value, was: %T`, totalPagesAttribute))
fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute))
}
totalRowsAttribute, ok := attributes["total_rows"]
@ -244,12 +244,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
return nil, diags
}
totalRowsVal, ok := totalRowsAttribute.(basetypes.Int32Value)
totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`total_rows expected to be basetypes.Int32Value, was: %T`, totalRowsAttribute))
fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute))
}
if diags.HasError() {
@ -339,12 +339,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
return NewPaginationValueUnknown(), diags
}
pageVal, ok := pageAttribute.(basetypes.Int32Value)
pageVal, ok := pageAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`page expected to be basetypes.Int32Value, was: %T`, pageAttribute))
fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute))
}
sizeAttribute, ok := attributes["size"]
@ -357,12 +357,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
return NewPaginationValueUnknown(), diags
}
sizeVal, ok := sizeAttribute.(basetypes.Int32Value)
sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`size expected to be basetypes.Int32Value, was: %T`, sizeAttribute))
fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
}
sortAttribute, ok := attributes["sort"]
@ -393,12 +393,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
return NewPaginationValueUnknown(), diags
}
totalPagesVal, ok := totalPagesAttribute.(basetypes.Int32Value)
totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`total_pages expected to be basetypes.Int32Value, was: %T`, totalPagesAttribute))
fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute))
}
totalRowsAttribute, ok := attributes["total_rows"]
@ -411,12 +411,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
return NewPaginationValueUnknown(), diags
}
totalRowsVal, ok := totalRowsAttribute.(basetypes.Int32Value)
totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`total_rows expected to be basetypes.Int32Value, was: %T`, totalRowsAttribute))
fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute))
}
if diags.HasError() {
@ -501,11 +501,11 @@ func (t PaginationType) ValueType(ctx context.Context) attr.Value {
var _ basetypes.ObjectValuable = PaginationValue{}
type PaginationValue struct {
Page basetypes.Int32Value `tfsdk:"page"`
Size basetypes.Int32Value `tfsdk:"size"`
Page basetypes.Int64Value `tfsdk:"page"`
Size basetypes.Int64Value `tfsdk:"size"`
Sort basetypes.StringValue `tfsdk:"sort"`
TotalPages basetypes.Int32Value `tfsdk:"total_pages"`
TotalRows basetypes.Int32Value `tfsdk:"total_rows"`
TotalPages basetypes.Int64Value `tfsdk:"total_pages"`
TotalRows basetypes.Int64Value `tfsdk:"total_rows"`
state attr.ValueState
}
@ -515,11 +515,11 @@ func (v PaginationValue) ToTerraformValue(ctx context.Context) (tftypes.Value, e
var val tftypes.Value
var err error
attrTypes["page"] = basetypes.Int32Type{}.TerraformType(ctx)
attrTypes["size"] = basetypes.Int32Type{}.TerraformType(ctx)
attrTypes["page"] = basetypes.Int64Type{}.TerraformType(ctx)
attrTypes["size"] = basetypes.Int64Type{}.TerraformType(ctx)
attrTypes["sort"] = basetypes.StringType{}.TerraformType(ctx)
attrTypes["total_pages"] = basetypes.Int32Type{}.TerraformType(ctx)
attrTypes["total_rows"] = basetypes.Int32Type{}.TerraformType(ctx)
attrTypes["total_pages"] = basetypes.Int64Type{}.TerraformType(ctx)
attrTypes["total_rows"] = basetypes.Int64Type{}.TerraformType(ctx)
objectType := tftypes.Object{AttributeTypes: attrTypes}
@ -597,11 +597,11 @@ func (v PaginationValue) ToObjectValue(ctx context.Context) (basetypes.ObjectVal
var diags diag.Diagnostics
attributeTypes := map[string]attr.Type{
"page": basetypes.Int32Type{},
"size": basetypes.Int32Type{},
"page": basetypes.Int64Type{},
"size": basetypes.Int64Type{},
"sort": basetypes.StringType{},
"total_pages": basetypes.Int32Type{},
"total_rows": basetypes.Int32Type{},
"total_pages": basetypes.Int64Type{},
"total_rows": basetypes.Int64Type{},
}
if v.IsNull() {
@ -673,11 +673,11 @@ func (v PaginationValue) Type(ctx context.Context) attr.Type {
func (v PaginationValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
return map[string]attr.Type{
"page": basetypes.Int32Type{},
"size": basetypes.Int32Type{},
"page": basetypes.Int64Type{},
"size": basetypes.Int64Type{},
"sort": basetypes.StringType{},
"total_pages": basetypes.Int32Type{},
"total_rows": basetypes.Int32Type{},
"total_pages": basetypes.Int64Type{},
"total_rows": basetypes.Int64Type{},
}
}
@ -716,12 +716,12 @@ func (t UsersType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue
return nil, diags
}
idVal, ok := idAttribute.(basetypes.Int32Value)
idVal, ok := idAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`id expected to be basetypes.Int32Value, was: %T`, idAttribute))
fmt.Sprintf(`id expected to be basetypes.Int64Value, was: %T`, idAttribute))
}
nameAttribute, ok := attributes["name"]
@ -845,12 +845,12 @@ func NewUsersValue(attributeTypes map[string]attr.Type, attributes map[string]at
return NewUsersValueUnknown(), diags
}
idVal, ok := idAttribute.(basetypes.Int32Value)
idVal, ok := idAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`id expected to be basetypes.Int32Value, was: %T`, idAttribute))
fmt.Sprintf(`id expected to be basetypes.Int64Value, was: %T`, idAttribute))
}
nameAttribute, ok := attributes["name"]
@ -969,7 +969,7 @@ func (t UsersType) ValueType(ctx context.Context) attr.Value {
var _ basetypes.ObjectValuable = UsersValue{}
type UsersValue struct {
Id basetypes.Int32Value `tfsdk:"id"`
Id basetypes.Int64Value `tfsdk:"id"`
Name basetypes.StringValue `tfsdk:"name"`
Status basetypes.StringValue `tfsdk:"status"`
state attr.ValueState
@ -981,7 +981,7 @@ func (v UsersValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error)
var val tftypes.Value
var err error
attrTypes["id"] = basetypes.Int32Type{}.TerraformType(ctx)
attrTypes["id"] = basetypes.Int64Type{}.TerraformType(ctx)
attrTypes["name"] = basetypes.StringType{}.TerraformType(ctx)
attrTypes["status"] = basetypes.StringType{}.TerraformType(ctx)
@ -1045,7 +1045,7 @@ func (v UsersValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, d
var diags diag.Diagnostics
attributeTypes := map[string]attr.Type{
"id": basetypes.Int32Type{},
"id": basetypes.Int64Type{},
"name": basetypes.StringType{},
"status": basetypes.StringType{},
}
@ -1109,7 +1109,7 @@ func (v UsersValue) Type(ctx context.Context) attr.Type {
func (v UsersValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
return map[string]attr.Type{
"id": basetypes.Int32Type{},
"id": basetypes.Int64Type{},
"name": basetypes.StringType{},
"status": basetypes.StringType{},
}

View file

@ -2,6 +2,7 @@ package postgresflexalpha
import (
"fmt"
"strconv"
"github.com/hashicorp/terraform-plugin-framework/attr"
"github.com/hashicorp/terraform-plugin-framework/types"
@ -21,18 +22,18 @@ func mapDataSourceFields(userResp *v3alpha1api.GetUserResponse, model *dataSourc
}
user := userResp
var userId int32
if model.UserId.ValueInt32() != 0 {
userId = model.UserId.ValueInt32()
var userID int64
if model.UserId.ValueInt64() != 0 {
userID = model.UserId.ValueInt64()
} else {
return fmt.Errorf("user id not present")
}
model.TerraformID = utils.BuildInternalTerraformId(
model.ProjectId.ValueString(), region, model.InstanceId.ValueString(), string(userId),
model.ProjectId.ValueString(), region, model.InstanceId.ValueString(), strconv.FormatInt(userID, 10),
)
model.UserId = types.Int32Value(userId)
model.UserId = types.Int64Value(userID)
model.Name = types.StringValue(user.GetName())
if user.Roles == nil {
@ -49,7 +50,7 @@ func mapDataSourceFields(userResp *v3alpha1api.GetUserResponse, model *dataSourc
model.Roles = types.List(rolesSet)
}
model.Id = types.Int32Value(userId)
model.Id = types.Int64Value(userID)
model.Region = types.StringValue(region)
model.Status = types.StringValue(user.GetStatus())
return nil
@ -107,17 +108,17 @@ func mapResourceFields(userResp *v3alpha1api.GetUserResponse, model *resourceMod
}
user := userResp
var userId int32
if !model.UserId.IsNull() && !model.UserId.IsUnknown() && model.UserId.ValueInt32() != 0 {
userId = model.UserId.ValueInt32()
var userID int64
if !model.UserId.IsNull() && !model.UserId.IsUnknown() && model.UserId.ValueInt64() != 0 {
userID = model.UserId.ValueInt64()
} else if user.Id != 0 {
userId = user.Id
userID = int64(user.Id)
} else {
return fmt.Errorf("user id not present")
}
model.Id = types.Int32Value(userId)
model.UserId = types.Int32Value(userId)
model.Id = types.Int64Value(userID)
model.UserId = types.Int64Value(userID)
model.Name = types.StringValue(user.Name)
if user.Roles == nil {

View file

@ -8,7 +8,7 @@ import (
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/stackitcloud/stackit-sdk-go/core/utils"
"github.com/stackitcloud/stackit-sdk-go/services/postgresflex"
postgresflex "github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
data "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/user/datasources_gen"
)
@ -28,8 +28,8 @@ func TestMapDataSourceFields(t *testing.T) {
testRegion,
dataSourceModel{
UserModel: data.UserModel{
Id: types.Int32Value(1),
UserId: types.Int32Value(1),
Id: types.Int64Value(1),
UserId: types.Int64Value(1),
InstanceId: types.StringValue("iid"),
ProjectId: types.StringValue("pid"),
Name: types.StringValue(""),
@ -44,18 +44,18 @@ func TestMapDataSourceFields(t *testing.T) {
{
"simple_values",
&postgresflex.GetUserResponse{
Roles: &[]postgresflex.UserRole{
Roles: []postgresflex.UserRole{
"role_1",
"role_2",
"",
},
Name: utils.Ptr("username"),
Name: "username",
},
testRegion,
dataSourceModel{
UserModel: data.UserModel{
Id: types.Int32Value(1),
UserId: types.Int32Value(1),
Id: types.Int64Value(1),
UserId: types.Int64Value(1),
InstanceId: types.StringValue("iid"),
ProjectId: types.StringValue("pid"),
Name: types.StringValue("username"),
@ -78,16 +78,16 @@ func TestMapDataSourceFields(t *testing.T) {
{
"null_fields_and_int_conversions",
&postgresflex.GetUserResponse{
Id: utils.Ptr(int32(1)),
Roles: &[]postgresflex.UserRole{},
Name: nil,
Status: utils.Ptr("status"),
Id: int32(1),
Roles: []postgresflex.UserRole{},
Name: "",
Status: "status",
},
testRegion,
dataSourceModel{
UserModel: data.UserModel{
Id: types.Int32Value(1),
UserId: types.Int32Value(1),
Id: types.Int64Value(1),
UserId: types.Int64Value(1),
InstanceId: types.StringValue("iid"),
ProjectId: types.StringValue("pid"),
Name: types.StringValue(""),
@ -161,12 +161,12 @@ func TestMapFieldsCreate(t *testing.T) {
{
"default_values",
&postgresflex.GetUserResponse{
Id: utils.Ptr(Int32(1)),
Id: int32(1),
},
testRegion,
resourceModel{
Id: types.Int32Value(1),
UserId: types.Int32Value(1),
Id: types.Int64Value(1),
UserId: types.Int64Value(1),
InstanceId: types.StringValue("iid"),
ProjectId: types.StringValue("pid"),
Name: types.StringNull(),
@ -181,14 +181,14 @@ func TestMapFieldsCreate(t *testing.T) {
{
"simple_values",
&postgresflex.GetUserResponse{
Id: utils.Ptr(Int32(1)),
Name: utils.Ptr("username"),
Status: utils.Ptr("status"),
Id: int32(1),
Name: "username",
Status: "status",
},
testRegion,
resourceModel{
Id: types.Int32Value(1),
UserId: types.Int32Value(1),
Id: types.Int64Value(1),
UserId: types.Int64Value(1),
InstanceId: types.StringValue("iid"),
ProjectId: types.StringValue("pid"),
Name: types.StringValue("username"),
@ -203,14 +203,14 @@ func TestMapFieldsCreate(t *testing.T) {
{
"null_fields_and_int_conversions",
&postgresflex.GetUserResponse{
Id: utils.Ptr(Int32(1)),
Name: nil,
Status: nil,
Id: int32(1),
Name: "",
Status: "",
},
testRegion,
resourceModel{
Id: types.Int32Value(1),
UserId: types.Int32Value(1),
Id: types.Int64Value(1),
UserId: types.Int64Value(1),
InstanceId: types.StringValue("iid"),
ProjectId: types.StringValue("pid"),
Name: types.StringNull(),
@ -282,12 +282,12 @@ func TestMapFields(t *testing.T) {
{
"default_values",
&postgresflex.GetUserResponse{
Id: utils.Ptr(Int32(1)),
Id: int32(1),
},
testRegion,
resourceModel{
Id: types.Int32Value(1),
UserId: types.Int32Value(Int32(1)),
Id: types.Int64Value(1),
UserId: types.Int64Value(int64(1)),
InstanceId: types.StringValue("iid"),
ProjectId: types.StringValue("pid"),
Name: types.StringNull(),
@ -301,18 +301,18 @@ func TestMapFields(t *testing.T) {
{
"simple_values",
&postgresflex.GetUserResponse{
Id: utils.Ptr(Int32(1)),
Roles: &[]postgresflex.UserRole{
Id: int32(1),
Roles: []postgresflex.UserRole{
"role_1",
"role_2",
"",
},
Name: utils.Ptr("username"),
Name: "username",
},
testRegion,
resourceModel{
Id: types.Int32Value(1),
UserId: types.Int32Value(1),
Id: types.Int64Value(1),
UserId: types.Int64Value(1),
InstanceId: types.StringValue("iid"),
ProjectId: types.StringValue("pid"),
Name: types.StringValue("username"),
@ -334,13 +334,13 @@ func TestMapFields(t *testing.T) {
{
"null_fields_and_int_conversions",
&postgresflex.GetUserResponse{
Id: utils.Ptr(Int32(1)),
Name: nil,
Id: int32(1),
Name: "",
},
testRegion,
resourceModel{
Id: types.Int32Value(1),
UserId: types.Int32Value(1),
Id: types.Int64Value(1),
UserId: types.Int64Value(1),
InstanceId: types.StringValue("iid"),
ProjectId: types.StringValue("pid"),
Name: types.StringNull(),
@ -402,17 +402,17 @@ func TestToCreatePayload(t *testing.T) {
tests := []struct {
description string
input *resourceModel
inputRoles *[]string
inputRoles []string
expected *postgresflex.CreateUserRequestPayload
isValid bool
}{
{
"default_values",
&resourceModel{},
&[]string{},
[]string{},
&postgresflex.CreateUserRequestPayload{
Name: nil,
Roles: &[]postgresflex.UserRole{},
Name: "",
Roles: []postgresflex.UserRole{},
},
true,
},
@ -421,13 +421,13 @@ func TestToCreatePayload(t *testing.T) {
&resourceModel{
Name: types.StringValue("username"),
},
&[]string{
[]string{
"role_1",
"role_2",
},
&postgresflex.CreateUserRequestPayload{
Name: utils.Ptr("username"),
Roles: &[]postgresflex.UserRole{
Name: "username",
Roles: []postgresflex.UserRole{
"role_1",
"role_2",
},
@ -439,21 +439,21 @@ func TestToCreatePayload(t *testing.T) {
&resourceModel{
Name: types.StringNull(),
},
&[]string{
[]string{
"",
},
&postgresflex.CreateUserRequestPayload{
Roles: &[]postgresflex.UserRole{
Roles: []postgresflex.UserRole{
"",
},
Name: nil,
Name: "",
},
true,
},
{
"nil_model",
nil,
&[]string{},
[]string{},
nil,
false,
},
@ -490,16 +490,16 @@ func TestToUpdatePayload(t *testing.T) {
tests := []struct {
description string
input *resourceModel
inputRoles *[]string
inputRoles []string
expected *postgresflex.UpdateUserRequestPayload
isValid bool
}{
{
"default_values",
&resourceModel{},
&[]string{},
[]string{},
&postgresflex.UpdateUserRequestPayload{
Roles: &[]postgresflex.UserRole{},
Roles: []postgresflex.UserRole{},
},
true,
},
@ -508,13 +508,13 @@ func TestToUpdatePayload(t *testing.T) {
&resourceModel{
Name: types.StringValue("username"),
},
&[]string{
[]string{
"role_1",
"role_2",
},
&postgresflex.UpdateUserRequestPayload{
Name: utils.Ptr("username"),
Roles: &[]postgresflex.UserRole{
Roles: []postgresflex.UserRole{
"role_1",
"role_2",
},
@ -526,11 +526,11 @@ func TestToUpdatePayload(t *testing.T) {
&resourceModel{
Name: types.StringNull(),
},
&[]string{
[]string{
"",
},
&postgresflex.UpdateUserRequestPayload{
Roles: &[]postgresflex.UserRole{
Roles: []postgresflex.UserRole{
"",
},
},
@ -539,7 +539,7 @@ func TestToUpdatePayload(t *testing.T) {
{
"nil_model",
nil,
&[]string{},
[]string{},
nil,
false,
},

View file

@ -55,7 +55,7 @@ type UserResourceIdentityModel struct {
ProjectID types.String `tfsdk:"project_id"`
Region types.String `tfsdk:"region"`
InstanceID types.String `tfsdk:"instance_id"`
UserID types.Int32 `tfsdk:"user_id"`
UserID types.Int64 `tfsdk:"user_id"`
}
// userResource implements the resource handling for a PostgreSQL Flex user.
@ -189,8 +189,8 @@ func (r *userResource) Create(
ctx = core.InitProviderContext(ctx)
arg := &clientArg{
projectId: model.ProjectId.ValueString(),
instanceId: model.InstanceId.ValueString(),
projectID: model.ProjectId.ValueString(),
instanceID: model.InstanceId.ValueString(),
region: r.providerData.GetRegionWithOverride(model.Region),
}
@ -211,9 +211,9 @@ func (r *userResource) Create(
// Create new user
userResp, err := r.client.DefaultAPI.CreateUserRequest(
ctx,
arg.projectId,
arg.projectID,
arg.region,
arg.instanceId,
arg.instanceID,
).CreateUserRequestPayload(*payload).Execute()
if err != nil {
core.LogAndAddError(ctx, &resp.Diagnostics, "Error creating user", fmt.Sprintf("Calling API: %v", err))
@ -230,7 +230,7 @@ func (r *userResource) Create(
)
return
}
arg.userId = *id
arg.userID = int64(*id)
ctx = tflog.SetField(ctx, "user_id", id)
@ -238,28 +238,28 @@ func (r *userResource) Create(
// Set data returned by API in identity
identity := UserResourceIdentityModel{
ProjectID: types.StringValue(arg.projectId),
ProjectID: types.StringValue(arg.projectID),
Region: types.StringValue(arg.region),
InstanceID: types.StringValue(arg.instanceId),
UserID: types.Int32Value(*id),
InstanceID: types.StringValue(arg.instanceID),
UserID: types.Int64Value(int64(*id)),
}
resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
if resp.Diagnostics.HasError() {
return
}
model.Id = types.Int32Value(*id)
model.UserId = types.Int32Value(*id)
model.Id = types.Int64Value(int64(*id))
model.UserId = types.Int64Value(int64(*id))
model.Password = types.StringValue(userResp.GetPassword())
model.Status = types.StringValue(userResp.GetStatus())
waitResp, err := postgresflexalphaWait.GetUserByIdWaitHandler(
ctx,
r.client.DefaultAPI,
arg.projectId,
arg.instanceId,
arg.projectID,
arg.instanceID,
arg.region,
*id,
int64(*id),
).SetSleepBeforeWait(
10 * time.Second,
).SetTimeout(
@ -324,8 +324,8 @@ func (r *userResource) Read(
ctx = core.InitProviderContext(ctx)
arg := &clientArg{
projectId: model.ProjectId.ValueString(),
instanceId: model.InstanceId.ValueString(),
projectID: model.ProjectId.ValueString(),
instanceID: model.InstanceId.ValueString(),
region: r.providerData.GetRegionWithOverride(model.Region),
}
@ -337,10 +337,10 @@ func (r *userResource) Read(
waitResp, err := postgresflexalphaWait.GetUserByIdWaitHandler(
ctx,
r.client.DefaultAPI,
arg.projectId,
arg.instanceId,
arg.projectID,
arg.instanceID,
arg.region,
model.UserId.ValueInt32(),
model.UserId.ValueInt64(),
).SetSleepBeforeWait(
10 * time.Second,
).SetTimeout(
@ -357,7 +357,7 @@ func (r *userResource) Read(
return
}
if waitResp.Id != model.UserId.ValueInt32() {
if int64(waitResp.Id) != model.UserId.ValueInt64() {
core.LogAndAddError(
ctx,
&resp.Diagnostics,
@ -366,16 +366,16 @@ func (r *userResource) Read(
)
return
}
arg.userId = waitResp.Id
arg.userID = int64(waitResp.Id)
ctx = core.LogResponse(ctx)
// Set data returned by API in identity
identity := UserResourceIdentityModel{
ProjectID: types.StringValue(arg.projectId),
ProjectID: types.StringValue(arg.projectID),
Region: types.StringValue(arg.region),
InstanceID: types.StringValue(arg.instanceId),
UserID: types.Int32Value(arg.userId),
InstanceID: types.StringValue(arg.instanceID),
UserID: types.Int64Value(arg.userID),
}
resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
if resp.Diagnostics.HasError() {
@ -407,8 +407,8 @@ func (r *userResource) Update(
ctx = core.InitProviderContext(ctx)
arg := &clientArg{
projectId: model.ProjectId.ValueString(),
instanceId: model.InstanceId.ValueString(),
projectID: model.ProjectId.ValueString(),
instanceID: model.InstanceId.ValueString(),
region: r.providerData.GetRegionWithOverride(model.Region),
}
@ -435,20 +435,20 @@ func (r *userResource) Update(
return
}
userId64 := arg.userId
if userId64 > math.MaxInt32 {
userID64 := arg.userID
if userID64 > math.MaxInt32 {
core.LogAndAddError(ctx, &resp.Diagnostics, "Error in type conversion", "int value too large (userId)")
return
}
userId := int32(userId64) // nolint:gosec // check is performed above
userID := int32(userID64) // nolint:gosec // check is performed above
// Update existing instance
err = r.client.DefaultAPI.UpdateUserRequest(
ctx,
arg.projectId,
arg.projectID,
arg.region,
arg.instanceId,
userId,
arg.instanceID,
userID,
).UpdateUserRequestPayload(*payload).Execute()
if err != nil {
core.LogAndAddError(ctx, &resp.Diagnostics, "Error updating user", err.Error())
@ -459,10 +459,10 @@ func (r *userResource) Update(
// Set data returned by API in identity
identity := UserResourceIdentityModel{
ProjectID: types.StringValue(arg.projectId),
ProjectID: types.StringValue(arg.projectID),
Region: types.StringValue(arg.region),
InstanceID: types.StringValue(arg.instanceId),
UserID: types.Int32Value(userId64),
InstanceID: types.StringValue(arg.instanceID),
UserID: types.Int64Value(userID64),
}
resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
if resp.Diagnostics.HasError() {
@ -473,10 +473,10 @@ func (r *userResource) Update(
waitResp, err := postgresflexalphaWait.GetUserByIdWaitHandler(
ctx,
r.client.DefaultAPI,
arg.projectId,
arg.instanceId,
arg.projectID,
arg.instanceID,
arg.region,
model.UserId.ValueInt32(),
model.UserId.ValueInt64(),
).SetSleepBeforeWait(
10 * time.Second,
).SetTimeout(
@ -493,7 +493,7 @@ func (r *userResource) Update(
return
}
if waitResp.Id != model.UserId.ValueInt32() {
if int64(waitResp.Id) != model.UserId.ValueInt64() {
core.LogAndAddError(
ctx,
&resp.Diagnostics,
@ -502,7 +502,7 @@ func (r *userResource) Update(
)
return
}
arg.userId = waitResp.Id
arg.userID = int64(waitResp.Id)
// Set state to fully populated data
diags = resp.State.Set(ctx, stateModel)
@ -547,15 +547,15 @@ func (r *userResource) Delete(
ctx = r.setTFLogFields(ctx, arg)
ctx = core.InitProviderContext(ctx)
userId64 := arg.userId
if userId64 > math.MaxInt32 {
userID64 := arg.userID
if userID64 > math.MaxInt32 {
core.LogAndAddError(ctx, &resp.Diagnostics, "Error in type conversion", "int value too large (userId)")
return
}
userId := int32(userId64) // nolint:gosec // check is performed above
userID := int32(userID64) // nolint:gosec // check is performed above
// Delete existing record set
err := r.client.DefaultAPI.DeleteUserRequest(ctx, arg.projectId, arg.region, arg.instanceId, userId).Execute()
err := r.client.DefaultAPI.DeleteUserRequest(ctx, arg.projectID, arg.region, arg.instanceID, userID).Execute()
if err != nil {
core.LogAndAddError(ctx, &resp.Diagnostics, "Error deleting user", fmt.Sprintf("Calling API: %v", err))
}
@ -598,7 +598,7 @@ func (r *userResource) IdentitySchema(
"instance_id": identityschema.StringAttribute{
RequiredForImport: true,
},
"user_id": identityschema.Int32Attribute{
"user_id": identityschema.Int64Attribute{
RequiredForImport: true,
},
},
@ -607,10 +607,10 @@ func (r *userResource) IdentitySchema(
// clientArg holds the arguments for API calls.
type clientArg struct {
projectId string
instanceId string
projectID string
instanceID string
region string
userId int32
userID int64
}
// ImportState imports a resource into the Terraform state on success.
@ -637,7 +637,7 @@ func (r *userResource) ImportState(
return
}
userId, err := strconv.ParseInt(idParts[3], 10, 64)
userID, err := strconv.ParseInt(idParts[3], 10, 64)
if err != nil {
core.LogAndAddError(
ctx,
@ -651,7 +651,7 @@ func (r *userResource) ImportState(
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), idParts[0])...)
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), idParts[1])...)
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), idParts[2])...)
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("user_id"), userId)...)
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("user_id"), userID)...)
tflog.Info(ctx, "Postgres Flex user state imported")
@ -665,15 +665,15 @@ func (r *userResource) ImportState(
return
}
projectId := identityData.ProjectID.ValueString()
projectID := identityData.ProjectID.ValueString()
region := identityData.Region.ValueString()
instanceId := identityData.InstanceID.ValueString()
userId := identityData.UserID.ValueInt32()
instanceID := identityData.InstanceID.ValueString()
userID := identityData.UserID.ValueInt64()
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), projectId)...)
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), projectID)...)
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), region)...)
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), instanceId)...)
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("user_id"), userId)...)
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), instanceID)...)
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("user_id"), userID)...)
tflog.Info(ctx, "Postgres Flex user state imported")
}
@ -683,25 +683,24 @@ func (r *userResource) extractIdentityData(
model resourceModel,
identity UserResourceIdentityModel,
) (*clientArg, error) {
var projectId, region, instanceId string
var userId int32
var projectID, region, instanceID string
var userID int64
if !model.UserId.IsNull() && !model.UserId.IsUnknown() {
userId = model.UserId.ValueInt32()
userID = model.UserId.ValueInt64()
} else {
if identity.UserID.IsNull() || identity.UserID.IsUnknown() {
return nil, fmt.Errorf("user_id not found in config")
}
userId = identity.UserID.ValueInt32()
userID = identity.UserID.ValueInt64()
}
if !model.ProjectId.IsNull() && !model.ProjectId.IsUnknown() {
projectId = model.ProjectId.ValueString()
projectID = model.ProjectId.ValueString()
} else {
if identity.ProjectID.IsNull() || identity.ProjectID.IsUnknown() {
return nil, fmt.Errorf("project_id not found in config")
}
projectId = identity.ProjectID.ValueString()
projectID = identity.ProjectID.ValueString()
}
if !model.Region.IsNull() && !model.Region.IsUnknown() {
@ -714,27 +713,27 @@ func (r *userResource) extractIdentityData(
}
if !model.InstanceId.IsNull() && !model.InstanceId.IsUnknown() {
instanceId = model.InstanceId.ValueString()
instanceID = model.InstanceId.ValueString()
} else {
if identity.InstanceID.IsNull() || identity.InstanceID.IsUnknown() {
return nil, fmt.Errorf("instance_id not found in config")
}
instanceId = identity.InstanceID.ValueString()
instanceID = identity.InstanceID.ValueString()
}
return &clientArg{
projectId: projectId,
instanceId: instanceId,
projectID: projectID,
instanceID: instanceID,
region: region,
userId: userId,
userID: userID,
}, nil
}
// setTFLogFields adds relevant fields to the context for terraform logging purposes.
func (r *userResource) setTFLogFields(ctx context.Context, arg *clientArg) context.Context {
ctx = tflog.SetField(ctx, "project_id", arg.projectId)
ctx = tflog.SetField(ctx, "instance_id", arg.instanceId)
ctx = tflog.SetField(ctx, "project_id", arg.projectID)
ctx = tflog.SetField(ctx, "instance_id", arg.instanceID)
ctx = tflog.SetField(ctx, "region", arg.region)
ctx = tflog.SetField(ctx, "user_id", arg.userId)
ctx = tflog.SetField(ctx, "user_id", arg.userID)
return ctx
}

View file

@ -14,7 +14,7 @@ import (
func UserResourceSchema(ctx context.Context) schema.Schema {
return schema.Schema{
Attributes: map[string]schema.Attribute{
"id": schema.Int32Attribute{
"id": schema.Int64Attribute{
Computed: true,
Description: "The ID of the user.",
MarkdownDescription: "The ID of the user.",
@ -64,7 +64,7 @@ func UserResourceSchema(ctx context.Context) schema.Schema {
Description: "The current status of the user.",
MarkdownDescription: "The current status of the user.",
},
"user_id": schema.Int32Attribute{
"user_id": schema.Int64Attribute{
Optional: true,
Computed: true,
Description: "The ID of the user.",
@ -75,7 +75,7 @@ func UserResourceSchema(ctx context.Context) schema.Schema {
}
type UserModel struct {
Id types.Int32 `tfsdk:"id"`
Id types.Int64 `tfsdk:"id"`
InstanceId types.String `tfsdk:"instance_id"`
Name types.String `tfsdk:"name"`
Password types.String `tfsdk:"password"`
@ -83,5 +83,5 @@ type UserModel struct {
Region types.String `tfsdk:"region"`
Roles types.List `tfsdk:"roles"`
Status types.String `tfsdk:"status"`
UserId types.Int32 `tfsdk:"user_id"`
UserId types.Int64 `tfsdk:"user_id"`
}

View file

@ -4,6 +4,7 @@ import (
"context"
"errors"
"fmt"
"math"
"net/http"
"time"
@ -207,11 +208,14 @@ func GetUserByIdWaitHandler(
ctx context.Context,
a APIClientUserInterface,
projectId, instanceId, region string,
userId int32,
userId int64,
) *wait.AsyncActionHandler[v3alpha1api.GetUserResponse] {
handler := wait.New(
func() (waitFinished bool, response *v3alpha1api.GetUserResponse, err error) {
userId32 := userId
if userId > math.MaxInt32 {
return false, nil, fmt.Errorf("userID too large for int32")
}
userId32 := int32(userId)
s, err := a.GetUserRequest(ctx, projectId, region, instanceId, userId32).Execute()
if err != nil {
var oapiErr *oapierror.GenericOpenAPIError
@ -243,11 +247,14 @@ func GetDatabaseByIdWaitHandler(
ctx context.Context,
a APIClientDatabaseInterface,
projectId, instanceId, region string,
databaseId int32,
databaseId int64,
) *wait.AsyncActionHandler[v3alpha1api.GetDatabaseResponse] {
handler := wait.New(
func() (waitFinished bool, response *v3alpha1api.GetDatabaseResponse, err error) {
dbId32 := databaseId
if databaseId > math.MaxInt32 {
return false, nil, fmt.Errorf("databaseID too large for int32")
}
dbId32 := int32(databaseId)
s, err := a.GetDatabaseRequest(ctx, projectId, region, instanceId, dbId32).Execute()
if err != nil {
var oapiErr *oapierror.GenericOpenAPIError

View file

@ -27,7 +27,8 @@ type apiClientInstanceMocked struct {
func (a *apiClientInstanceMocked) GetInstanceRequest(
_ context.Context,
_, _, _ string,
) *postgresflex.ApiGetInstanceRequestRequest {
) postgresflex.ApiGetInstanceRequestRequest {
return postgresflex.ApiGetInstanceRequestRequest{}
}
func (a *apiClientInstanceMocked) GetInstanceRequestExecute(
@ -53,6 +54,13 @@ func (a *apiClientInstanceMocked) GetInstanceRequestExecute(
}, nil
}
func (a *apiClientInstanceMocked) ListUsersRequest(
_ context.Context,
_, _, _ string,
) postgresflex.ApiListUsersRequestRequest {
return postgresflex.ApiListUsersRequestRequest{}
}
func (a *apiClientInstanceMocked) ListUsersRequestExecute(
_ context.Context,
_, _, _ string,
@ -210,13 +218,13 @@ func TestCreateInstanceWaitHandler(t *testing.T) {
GetInstanceRequestExecuteMock: nil,
}
apiClient := &apiClientInstanceMocked{
instanceId: instanceId,
instanceState: tt.instanceState,
instanceNetwork: tt.instanceNetwork,
instanceGetFails: tt.instanceGetFails,
usersGetErrorStatus: tt.usersGetErrorStatus,
}
//apiClient := &apiClientInstanceMocked{
// instanceId: instanceId,
// instanceState: tt.instanceState,
// instanceNetwork: tt.instanceNetwork,
// instanceGetFails: tt.instanceGetFails,
// usersGetErrorStatus: tt.usersGetErrorStatus,
//}
handler := CreateInstanceWaitHandler(context.Background(), apiClientMock, "", "", instanceId)
@ -254,9 +262,9 @@ func TestUpdateInstanceWaitHandler(t *testing.T) {
},
wantErr: false,
wantRes: &postgresflex.GetInstanceResponse{
Id: utils.Ptr("foo-bar"),
Status: postgresflex.GetInstanceResponseGetStatusAttributeType(utils.Ptr(InstanceStateSuccess)),
Network: &postgresflex.InstanceNetwork{
Id: "foo-bar",
Status: postgresflex.Status(InstanceStateSuccess),
Network: postgresflex.InstanceNetwork{
AccessScope: nil,
Acl: nil,
InstanceAddress: utils.Ptr("10.0.0.1"),
@ -276,9 +284,9 @@ func TestUpdateInstanceWaitHandler(t *testing.T) {
},
wantErr: true,
wantRes: &postgresflex.GetInstanceResponse{
Id: utils.Ptr("foo-bar"),
Status: postgresflex.GetInstanceResponseGetStatusAttributeType(utils.Ptr(InstanceStateFailed)),
Network: &postgresflex.InstanceNetwork{
Id: "foo-bar",
Status: postgresflex.Status(InstanceStateFailed),
Network: postgresflex.InstanceNetwork{
AccessScope: nil,
Acl: nil,
InstanceAddress: utils.Ptr("10.0.0.1"),