// Package postgresflexalpha implements the Terraform data source for
// STACKIT Postgres Flex (alpha) databases.
package postgresflexalpha

import (
	"context"
	"fmt"
	"net/http"

	"github.com/hashicorp/terraform-plugin-framework/datasource"
	"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
	"github.com/hashicorp/terraform-plugin-framework/diag"
	"github.com/hashicorp/terraform-plugin-framework/types"
	"github.com/hashicorp/terraform-plugin-log/tflog"

	"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
	"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
	postgresflexalpha2 "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/database/datasources_gen"
	postgresflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/utils"
	"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
	"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
)

// Ensure the implementation satisfies the expected interfaces.
var (
	_ datasource.DataSource = &databaseDataSource{}
)

// NewDatabaseDataSource is a helper function to simplify the provider implementation.
func NewDatabaseDataSource() datasource.DataSource {
	return &databaseDataSource{}
}

// dataSourceModel maps the data source schema data.
type dataSourceModel struct {
	// Embeds the generated model with the database attributes.
	postgresflexalpha2.DatabaseModel
	// TerraformID is Terraform's internal composite resource ID (computed).
	TerraformID types.String `tfsdk:"id"`
}

// databaseDataSource is the data source implementation.
type databaseDataSource struct {
	// client is the Postgres Flex API client; populated in Configure.
	client *postgresflexalpha.APIClient
	// providerData holds provider-level configuration (e.g. region override).
	providerData core.ProviderData
}

// Metadata returns the data source type name.
func (r *databaseDataSource) Metadata( _ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse, ) { resp.TypeName = req.ProviderTypeName + "_postgresflexalpha_database" } // Configure adds the provider configured client to the data source. func (r *databaseDataSource) Configure( ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse, ) { var ok bool r.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics) if !ok { return } apiClient := postgresflexUtils.ConfigureClient(ctx, &r.providerData, &resp.Diagnostics) if resp.Diagnostics.HasError() { return } r.client = apiClient tflog.Info(ctx, "Postgres Flex database client configured") } // Schema defines the schema for the data source. func (r *databaseDataSource) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) { s := postgresflexalpha2.DatabaseDataSourceSchema(ctx) s.Attributes["id"] = schema.StringAttribute{ Description: "Terraform's internal resource ID. It is structured as \\\"`project_id`,`region`,`instance_id`," + "`database_id`\\\".\",", Computed: true, } resp.Schema = s } // Read fetches the data for the data source. func (r *databaseDataSource) Read( ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse, ) { // nolint:gocritic // function signature required by Terraform var model dataSourceModel diags := req.Config.Get(ctx, &model) resp.Diagnostics.Append(diags...) 
if resp.Diagnostics.HasError() { return } ctx = core.InitProviderContext(ctx) projectId := model.ProjectId.ValueString() instanceId := model.InstanceId.ValueString() region := r.providerData.GetRegionWithOverride(model.Region) ctx = tflog.SetField(ctx, "project_id", projectId) ctx = tflog.SetField(ctx, "instance_id", instanceId) ctx = tflog.SetField(ctx, "region", region) databaseResp, err := r.getDatabaseByNameOrID(ctx, &model, projectId, region, instanceId, &resp.Diagnostics) if resp.Diagnostics.HasError() { return } if err != nil { handleReadError(ctx, &resp.Diagnostics, err, projectId, instanceId) resp.State.RemoveResource(ctx) return } ctx = core.LogResponse(ctx) // Map response body to schema and populate Computed attribute values err = mapFields(databaseResp, &model, region) if err != nil { core.LogAndAddError( ctx, &resp.Diagnostics, "Error reading database", fmt.Sprintf("Processing API payload: %v", err), ) return } // Set refreshed state diags = resp.State.Set(ctx, model) resp.Diagnostics.Append(diags...) if resp.Diagnostics.HasError() { return } tflog.Info(ctx, "Postgres Flex database read") } // getDatabaseByNameOrID retrieves a single database by ensuring either a unique ID or name is provided. 
func (r *databaseDataSource) getDatabaseByNameOrID( ctx context.Context, model *dataSourceModel, projectId, region, instanceId string, diags *diag.Diagnostics, ) (*postgresflexalpha.ListDatabase, error) { isIdSet := !model.DatabaseId.IsNull() && !model.DatabaseId.IsUnknown() isNameSet := !model.Name.IsNull() && !model.Name.IsUnknown() if (isIdSet && isNameSet) || (!isIdSet && !isNameSet) { diags.AddError( "Invalid configuration", "Exactly one of 'id' or 'name' must be specified.", ) return nil, nil } if isIdSet { databaseId := model.DatabaseId.ValueInt64() ctx = tflog.SetField(ctx, "database_id", databaseId) return getDatabaseById(ctx, r.client, projectId, region, instanceId, databaseId) } databaseName := model.Name.ValueString() ctx = tflog.SetField(ctx, "name", databaseName) return getDatabaseByName(ctx, r.client, projectId, region, instanceId, databaseName) } // handleReadError centralizes API error handling for the Read operation. func handleReadError(ctx context.Context, diags *diag.Diagnostics, err error, projectId, instanceId string) { utils.LogError( ctx, diags, err, "Reading database", fmt.Sprintf( "Could not retrieve database for instance %q in project %q.", instanceId, projectId, ), map[int]string{ http.StatusBadRequest: fmt.Sprintf( "Invalid request parameters for project %q and instance %q.", projectId, instanceId, ), http.StatusNotFound: fmt.Sprintf( "Database, instance %q, or project %q not found.", instanceId, projectId, ), http.StatusForbidden: fmt.Sprintf("Forbidden access to project %q.", projectId), }, ) }